###############################################################################
# Python environment, main app and startup script.
FROM python:3.11.5
#FROM python:3.11.9-slim
#FROM python:3.11.9-alpine
#FROM python:3.11-bookworm
RUN echo "\n\n############################################# Dockerfile DbgUI ######################################\n\n"
#ENTRYPOINT ["/app/startup.sh"]
#RUN apt-get update && \
#    apt-get install -y libc6 && \
#    rm -rf /var/lib/apt/lists/*
WORKDIR /app
#RUN ls -l / || ls -l /lib || ls -l /usr || ls -l /usr/lib6 || echo "### An ls failed."
COPY ./requirements.txt /app/requirements.txt
COPY ./semsearch.py /app/semsearch.py
COPY ./startup.sh /app/startup.sh
COPY ./semsearchDbgUI.py /app/semsearchDbgUI.py
COPY ./startupDbgUI.sh /app/startupDbgUI.sh
COPY ./.streamlit/main.css /app/.streamlit/main.css
COPY ./app.py /app/app.py
COPY ./cmd.sh /app/cmd.sh
RUN chmod 755 /app/startup.sh /app/cmd.sh
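# The multi-qa-MiniLM-L6-cos-v1 directory below is assumed to have been
# downloaded into the build context beforehand; the COPY fails if it is absent.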
COPY ./multi-qa-MiniLM-L6-cos-v1 /app/multi-qa-MiniLM-L6-cos-v1
RUN mkdir -p /app/inputDocs
COPY ./inputDocs/* /app/inputDocs/
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
RUN pip install https://files.pythonhosted.org/packages/13/87/e0cb08c2d4bd7d38ab63816b306c8b1e7cfdc0e59bd54462e8b0df069078/semantic_text_splitter-0.6.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
RUN pip show semantic-text-splitter
RUN pip install llama_cpp_python
##############################################################################
# Install Weaviate
WORKDIR /app/weaviate
RUN wget -qO- https://github.com/weaviate/weaviate/releases/download/v1.24.10/weaviate-v1.24.10-linux-amd64.tar.gz | tar -xzf -
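# The release tarball is expected to unpack the weaviate binary directly into
# the current WORKDIR (/app/weaviate); the ls below is a build-time sanity check.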
RUN ls -al /app/weaviate
# Set environment variables for Weaviate
ENV PATH="/app:/app/weaviate-v1.24.10-linux-x86_64:${PATH}" | |
# Expose the Weaviate port
EXPOSE 8080
#COPY Llama-2-7B-Chat-GGUF/llama-2-7b-chat.Q4_0.gguf /app
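# Downloading the quantized Llama 2 chat model (several GB) bakes it into the
# image at build time; the commented-out COPY above is the local-file alternative.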
RUN cd /app; wget -v https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q4_0.gguf
##############################################################################
# Install text2vec-transformers
WORKDIR /app/text2vec-transformers
COPY --from=semitechnologies/transformers-inference:sentence-transformers-multi-qa-MiniLM-L6-cos-v1 /app /app/text2vec-transformers
COPY --from=semitechnologies/transformers-inference:sentence-transformers-multi-qa-MiniLM-L6-cos-v1 /usr/local/bin /app/text2vec-transformers/bin
COPY ./multi-qa-MiniLM-L6-cos-v1 /app/text2vec-transformers
ENV PATH="/usr/bin/local:/app/text2vec-transformers:/app/text2vec-transformers/bin:${PATH}" | |
#RUN pip install -r requirements.txt
#RUN pip install nltk==3.8.1 optimum==1.13.2 onnxruntime==1.16.1 onnx==1.14.1
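# custom_prerequisites.py is provided by the transformers-inference image copied
# above and is expected to pre-fetch any runtime assets the inference server needs.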
RUN ./custom_prerequisites.py
##############################
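# Run the container as a non-root user with UID 1000, as recommended for
# Hugging Face Spaces.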
RUN useradd -m -u 1000 user
RUN chmod -R 755 /app
RUN chown -R user /app
RUN chgrp -R user /app
#############################################
# Specify /data volume.
#VOLUME /data
WORKDIR /app
USER user
##############################################################################
# Start the Weaviate vector database, text2vec-transformers and the semantic search app.
#RUN /app/startup.sh
#RUN --mount=type=cache,target=/data,mode=777 /app/startup.sh
#RUN --mount=type=cache,target=/data,mode=777 echo "### Mounting /data"
#CMD ["/app/startupDbgUI.sh"]
EXPOSE 8501
#CMD /app/startup.sh; /usr/local/bin/streamlit run semsearch.py --server.port=8501 --server.address=0.0.0.0
CMD streamlit run /app/app.py \
    --server.headless true \
    --server.enableCORS false \
    --server.enableXsrfProtection false \
    --server.fileWatcherType none
#CMD python apptst.py
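###############################################################################
# A possible local build-and-run sequence, assuming an image tag of "semsearch"
# (the tag is illustrative, not defined anywhere in this repo):
#   docker build -t semsearch .
#   docker run -p 8501:8501 -p 8080:8080 semsearch
# Port 8501 serves the Streamlit UI; 8080 is the port exposed for Weaviate.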