###############################################################################
# Python environment, main app and startup script.
FROM python:3.11.5
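# Full Debian-based Python image; alternative bases are kept commented out below.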
#FROM python:3.11.9-slim
#FROM python:3.11.9-alpine
#FROM python:3.11-bookworm
RUN echo "\n\n############################################# Dockerfile ######################################\n\n"
#ENTRYPOINT ["/app/startup.sh"]
#RUN apt-get update && \
#    apt-get install -y libc6 && \
#    rm -rf /var/lib/apt/lists/*
WORKDIR /app
#RUN ls -l / || ls -l /lib || ls -l /usr || ls -l /usr/lib6 || echo "### An ls failed."
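# Copy the app sources, startup script, Streamlit CSS, the local copy of the
# multi-qa-MiniLM-L6-cos-v1 sentence-transformer model, and the input documents
# into /app.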
COPY ./requirements.txt /app/requirements.txt
COPY ./semsearch.py /app/semsearch.py
COPY ./startup.sh /app/startup.sh
COPY ./.streamlit/main.css /app/.streamlit/main.css
COPY ./app.py /app/app.py
RUN chmod 755 /app/startup.sh
COPY ./multi-qa-MiniLM-L6-cos-v1 /app/multi-qa-MiniLM-L6-cos-v1
RUN mkdir -p /app/inputDocs
COPY ./inputDocs/* /app/inputDocs/
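# Install the Python dependencies; semantic-text-splitter is pinned to its
# 0.6.3 manylinux wheel and the install is verified with `pip show`.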
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
RUN pip install https://files.pythonhosted.org/packages/13/87/e0cb08c2d4bd7d38ab63816b306c8b1e7cfdc0e59bd54462e8b0df069078/semantic_text_splitter-0.6.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
RUN pip show semantic-text-splitter
#RUN pip install llama_cpp_python
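# Install llama-cpp-python for CPU, preferring prebuilt wheels from the
# abetlen CPU index; the CMAKE variables apply if pip falls back to a
# source build.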
RUN FORCE_CMAKE=1 CMAKE_SYSTEM_PROCESSOR=AMD64 pip install --verbose --no-cache-dir llama-cpp-python --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu
##############################################################################
# Install Weaviate
WORKDIR /app/weaviate
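# The release tarball is expected to unpack the weaviate binary into the
# current directory, /app/weaviate.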
RUN wget -qO- https://github.com/weaviate/weaviate/releases/download/v1.24.10/weaviate-v1.24.10-linux-amd64.tar.gz | tar -xzf -
RUN ls -al /app/weaviate
# Set environment variables for Weaviate
ENV PATH="/app:/app/weaviate:${PATH}"
# Expose the Weaviate port
EXPOSE 8080
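# Download the quantized Llama 2 7B chat model (GGUF) into /app.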
#COPY Llama-2-7B-Chat-GGUF/llama-2-7b-chat.Q4_0.gguf /app
RUN cd /app; wget -v https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q4_0.gguf
##############################################################################
# Install text2vec-transformers
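# Copy the inference app and its CLI entry points out of the
# semitechnologies/transformers-inference image instead of installing it
# separately, then run the custom_prerequisites.py script it ships.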
WORKDIR /app/text2vec-transformers
COPY --from=semitechnologies/transformers-inference:sentence-transformers-multi-qa-MiniLM-L6-cos-v1 /app /app/text2vec-transformers
COPY --from=semitechnologies/transformers-inference:sentence-transformers-multi-qa-MiniLM-L6-cos-v1 /usr/local/bin /app/text2vec-transformers/bin
COPY ./multi-qa-MiniLM-L6-cos-v1 /app/app/text2vec-transformers
ENV PATH="/usr/local/bin:/app/text2vec-transformers:/app/text2vec-transformers/bin:${PATH}"
RUN ./custom_prerequisites.py
##############################
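# Create a non-root user, give it ownership of /app, and run as that user.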
RUN useradd -m -u 1000 user
RUN chmod -R 755 /app
RUN chown -R user:user /app
WORKDIR /app
USER user
EXPOSE 8501
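# Launch the Streamlit UI on its default port, 8501.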
CMD streamlit run /app/app.py \
    --server.headless true \
    --server.enableCORS false \
    --server.enableXsrfProtection false \
    --server.fileWatcherType none