Spaces:
Running
Running
MVPilgrim
committed on
Commit
·
cde7628
1
Parent(s):
8d35f8b
debug
Browse files — Dockerfile +1 -1
Dockerfile
CHANGED
@@ -50,7 +50,7 @@ RUN pip3 install --break-system-packages https://files.pythonhosted.org/packages
|
|
50 |
#RUN FORCE_CMAKE=1 CMAKE_SYSTEM_PROCESSOR=AMD64 pip3 install --break-system-packages --verbose --no-cache-dir llama-cpp-python
|
51 |
|
52 |
#RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on -DCUDA_PATH=/usr/local/cuda-12.2 -DCUDAToolkit_ROOT=/usr/local/cuda-12.2 -DCUDAToolkit_INCLUDE_DIR=/usr/local/cuda-12/include -DCUDAToolkit_LIBRARY_DIR=/usr/local/cuda-12.2/lib64" FORCE_CMAKE=1 pip install llama-cpp-python - no-cache-dir
|
53 |
-
RUN CMAKE_ARGS="
|
54 |
|
55 |
RUN pip3 install --break-system-packages cffi
|
56 |
|
|
|
50 |
#RUN FORCE_CMAKE=1 CMAKE_SYSTEM_PROCESSOR=AMD64 pip3 install --break-system-packages --verbose --no-cache-dir llama-cpp-python
|
51 |
|
52 |
#RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on -DCUDA_PATH=/usr/local/cuda-12.2 -DCUDAToolkit_ROOT=/usr/local/cuda-12.2 -DCUDAToolkit_INCLUDE_DIR=/usr/local/cuda-12/include -DCUDAToolkit_LIBRARY_DIR=/usr/local/cuda-12.2/lib64" FORCE_CMAKE=1 pip install llama-cpp-python - no-cache-dir
|
53 |
+
RUN CMAKE_ARGS="GGML_CUDA=on" FORCE_CMAKE=1 pip install --break-system-packages llama-cpp-python --no-cache-dir
|
54 |
|
55 |
RUN pip3 install --break-system-packages cffi
|
56 |
|