Update Dockerfile
Dockerfile  +5 -3  CHANGED
@@ -6,7 +6,8 @@ FROM python:3.11.9-alpine3.20
 USER root

 # Installing gcc compiler and main library.
-
+# ADDED 'git' AND 'cmake' TO FIX THE LLAMA-CPP-PYTHON BUILD ERROR
+RUN apk update && apk add git cmake wget build-base python3-dev musl-dev linux-headers
 RUN CMAKE_ARGS="-DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS" pip install llama-cpp-python

 # Copying files into folder and making it working dir.
@@ -20,7 +21,8 @@ RUN mkdir translator
 RUN chmod -R 777 translator

 # Installing wget and downloading model.
-
+# CHANGED TO USE YOUR SPECIFIED MODEL
+ADD https://huggingface.co/mradermacher/ReasonableLlama3-3B-Jr-GGUF/resolve/main/reasonablellama3-3b-jr-Q4_K_M.gguf /app/model.bin
 RUN chmod -R 777 /app/model.bin
 # You can use other models! Or you can comment out these two RUNs and include your own model named "model.bin" in the Space/repo/Docker image.

@@ -35,4 +37,4 @@ RUN python3 -m pip install -U pip setuptools wheel
 RUN pip install --upgrade -r /app/requirements.txt

 # Now it's time to run Gradio app!
-CMD ["python", "gradio_app.py"]
+CMD ["python", "gradio_app.py"]
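
For context on how the pieces fit together: the image compiles llama-cpp-python against OpenBLAS, bakes the GGUF file in at /app/model.bin, and finally runs gradio_app.py. That script is not part of this commit, so the following is only a minimal sketch of what such an app could look like, assuming it loads the model with llama-cpp-python's Llama class and exposes a plain text-to-text Gradio interface; the context size, token limit, and prompt handling are illustrative assumptions, not values taken from the Space.

# gradio_app.py -- minimal sketch only, not the Space's actual app.
import gradio as gr
from llama_cpp import Llama

# Load the GGUF model that the Dockerfile's ADD instruction placed at /app/model.bin.
llm = Llama(model_path="/app/model.bin", n_ctx=2048)

def generate(prompt: str) -> str:
    # Plain completion call; a real app would add a chat/translation template and stop sequences.
    result = llm(prompt, max_tokens=256)
    return result["choices"][0]["text"]

demo = gr.Interface(fn=generate, inputs="text", outputs="text")

if __name__ == "__main__":
    # Hugging Face Spaces routes traffic to port 7860 by default.
    demo.launch(server_name="0.0.0.0", server_port=7860)

As for the new apk packages: Alpine images use musl rather than glibc, so llama-cpp-python generally has to compile from source there, which is why build-base, cmake, git, python3-dev, musl-dev and linux-headers are needed before the pip install can succeed.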