Create Dockerfile_20250617
Dockerfile_20250617 · ADDED · +154 -0

# Start with the NVIDIA CUDA 12.2 base image
FROM nvidia/cuda:12.2.0-base-ubuntu22.04 AS base

ENV DEBIAN_FRONTEND=noninteractive
ENV DEBCONF_NOWARNINGS="yes"
ENV CUDA_VISIBLE_DEVICES=0
ENV LLAMA_CUBLAS=1

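# Note: LLAMA_CUBLAS is the older llama.cpp build-flag name; newer llama.cpp
# releases renamed it to GGML_CUDA, which is what the pip build of
# llama-cpp-python further down passes via CMAKE_ARGS.
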
# Install necessary dependencies and musl
RUN apt-get update && apt-get install -y \
    software-properties-common \
    wget \
    musl \
    musl-dev \
    musl-tools \
    libffi-dev \
    git \
    build-essential \
    pkg-config \
    cuda-toolkit-12-2 \
    cuda-nvcc-12-2 \
    libcublas-12-2 \
    libcudnn8 \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Install Python 3.11
RUN add-apt-repository ppa:deadsnakes/ppa && \
    apt-get update && \
    apt-get install -y \
    python3.11 \
    python3.11-venv \
    python3.11-dev \
    python3-pip \
    tzdata \
    && rm -rf /var/lib/apt/lists/*

# Set Python 3.11 as the default python version
RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1
RUN update-alternatives --set python3 /usr/bin/python3.11

# Create a symlink so that text2vec-transformers can invoke python3 via /usr/local/bin/python.
RUN ln -s /usr/bin/python3.11 /usr/local/bin/python

# Set up environment variables
ENV LD_LIBRARY_PATH="/usr/lib/x86_64-linux-gnu:/usr/lib64:/usr/local/cuda/lib64:$LD_LIBRARY_PATH"
ENV PATH="/usr/local/cuda/bin:/app:/app/text2vec-transformers:/app/text2vec-transformers/bin:/usr/local/bin:/usr/bin:$PATH"

# Upgrade pip to support --break-system-packages.
RUN python3 -m pip install --upgrade pip

# Install requirements packages, the semantic text splitter, and llama_cpp.
COPY ./requirements.txt /app/requirements.txt
RUN pip3 install --break-system-packages --no-cache-dir --upgrade -r /app/requirements.txt
RUN pip3 install --break-system-packages https://files.pythonhosted.org/packages/13/87/e0cb08c2d4bd7d38ab63816b306c8b1e7cfdc0e59bd54462e8b0df069078/semantic_text_splitter-0.6.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

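# Rough sketch of how the pinned 0.6.3 splitter is presumably used by the app
# (illustrative only; class names changed across semantic-text-splitter
# releases, so treat this as the 0.6.x-era API and doc_text as a placeholder):
#   from semantic_text_splitter import CharacterTextSplitter
#   chunks = CharacterTextSplitter(trim_chunks=True).chunks(doc_text, 1000)
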
#RUN pip3 install --break-system-packages llama_cpp_python
#RUN FORCE_CMAKE=1 CMAKE_SYSTEM_PROCESSOR=AMD64 pip3 install --break-system-packages --verbose --no-cache-dir llama-cpp-python --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu
#RUN FORCE_CMAKE=1 CMAKE_SYSTEM_PROCESSOR=AMD64 pip3 install --break-system-packages --verbose --no-cache-dir llama-cpp-python

#RUN CMAKE_ARGS="-DLLAMA_CUBLAS=on -DCUDA_PATH=/usr/local/cuda-12.2 -DCUDAToolkit_ROOT=/usr/local/cuda-12.2 -DCUDAToolkit_INCLUDE_DIR=/usr/local/cuda-12/include -DCUDAToolkit_LIBRARY_DIR=/usr/local/cuda-12.2/lib64" FORCE_CMAKE=1 pip install llama-cpp-python --no-cache-dir
RUN CMAKE_ARGS="-DGGML_CUDA=on" FORCE_CMAKE=1 pip install --break-system-packages llama-cpp-python --no-cache-dir

RUN pip3 install --break-system-packages cffi

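# Optional sanity check (an assumption, not part of the original build):
# uncomment to fail the image build early if the wheel lacks CUDA offload.
#RUN python3 -c "import llama_cpp; assert llama_cpp.llama_supports_gpu_offload()"
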
# Install text2vec-transformers
WORKDIR /app/text2vec-transformers
COPY --from=semitechnologies/transformers-inference:sentence-transformers-multi-qa-MiniLM-L6-cos-v1 /app /app/text2vec-transformers
COPY --from=semitechnologies/transformers-inference:sentence-transformers-multi-qa-MiniLM-L6-cos-v1 /usr/local/bin /app/text2vec-transformers/bin
RUN ./custom_prerequisites.py

COPY ./multi-qa-MiniLM-L6-cos-v1 /app/multi-qa-MiniLM-L6-cos-v1

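# The transformers-inference image bundles a small HTTP vectorizer service
# that Weaviate's text2vec-transformers module queries at runtime; wiring the
# two together is presumably handled by startup.sh (see the illustrative
# launch line under the Weaviate install below).
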
# Copy application files
WORKDIR /app
COPY ./semsearch.py /app/semsearch.py
COPY ./startup.sh /app/startup.sh
COPY ./.streamlit/main.css /app/.streamlit/main.css
COPY ./app.py /app/app.py
RUN chmod 755 /app/startup.sh

# Copy input documents
RUN mkdir -p /app/inputDocs
COPY ./inputDocs/* /app/inputDocs/

# Install Weaviate
WORKDIR /app/weaviate
RUN wget -qO- https://github.com/weaviate/weaviate/releases/download/v1.24.10/weaviate-v1.24.10-linux-amd64.tar.gz | tar -xzf -

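# Illustrative launch line (assumed to live in startup.sh, which is copied
# above but not shown here; the module names and env vars follow Weaviate's
# documented configuration, the inference-API port is a guess):
#   ENABLE_MODULES=text2vec-transformers \
#   DEFAULT_VECTORIZER_MODULE=text2vec-transformers \
#   TRANSFORMERS_INFERENCE_API=http://127.0.0.1:8081 \
#   /app/weaviate/weaviate --host 0.0.0.0 --port 8080 --scheme http
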
# Download the Llama model
WORKDIR /app
RUN wget -v https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q4_0.gguf

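# How the app presumably loads this GGUF with GPU offload (illustrative; the
# actual call lives in semsearch.py / app.py, which are not part of this file):
#   from llama_cpp import Llama
#   llm = Llama(model_path="/app/llama-2-7b-chat.Q4_0.gguf", n_gpu_layers=-1)
#   out = llm("Q: What is semantic search? A:", max_tokens=128)
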
# Create a non-root user
RUN groupadd -g 1000 user && useradd -m -u 1000 -g user user

# Set permissions
RUN chown -R user:user /app
RUN chmod -R 755 /app

# Switch to the non-root user
USER user

# Verify Python and musl installations
#RUN python3 --version && \
#    ldd --version | grep musl

EXPOSE 8080 8501
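# 8080 serves the Weaviate HTTP API; 8501 is Streamlit's default UI port.
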
CMD ["streamlit", "run", "/app/app.py", "--server.headless", "true", "--server.enableCORS", "false", "--server.enableXsrfProtection", "false", "--server.fileWatcherType", "none"]
#CMD ["/app/delay.sh", "1200"]