|
|
|
# Base image: Debian-based slim Python 3.10.
# NOTE(review): the 3.10-slim tag floats over time — pin a patch version
# (and ideally a digest, e.g. python:3.10.14-slim@sha256:...) for
# reproducible builds.
FROM python:3.10-slim
|
|
|
|
|
# All subsequent relative paths (COPY destinations, /app/models.txt) resolve
# under /app; WORKDIR creates the directory if it does not exist.
WORKDIR /app
|
|
|
|
|
# OS packages: curl is used below to install Ollama and to probe its API;
# git is presumably needed for pip requirements fetched from VCS URLs —
# TODO confirm, and drop it if requirements.txt has none.
# --no-install-recommends keeps the image small (hadolint DL3015); the apt
# list cache is removed in the same layer so it never persists in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    git \
    && rm -rf /var/lib/apt/lists/*
|
|
|
|
|
# Install Ollama via its official install script.
# bash -o pipefail ensures a failed curl download fails the build instead of
# being masked by the pipe (hadolint DL4006 — default /bin/sh ignores it).
# NOTE(review): piping an unpinned remote script into a shell is not
# reproducible or verifiable — consider downloading a pinned release and
# checking its checksum instead.
RUN bash -o pipefail -c "curl -fsSL https://ollama.com/install.sh | sh"
|
|
|
|
|
# Pre-pull the llama3 model at build time so it is baked into the image and
# the container starts without a multi-GB download. A temporary Ollama server
# is started, waited on with a bounded loop (an unbounded `until` would hang
# the build forever if the server never comes up), and killed when done.
# Bug fixed: the original chain ended in `&& echo ok || echo failed && …`,
# where `||` bound to the whole preceding chain — a failed `ollama pull`
# printed "Model pull failed" but still produced a "successful" image.
# A pull failure now fails the build.
RUN ollama serve & \
    OLLAMA_PID=$!; \
    tries=60; \
    until curl -s http://localhost:11434 > /dev/null; do \
        tries=$((tries - 1)); \
        [ "$tries" -gt 0 ] || { echo 'Ollama server failed to start' >&2; exit 1; }; \
        echo 'Waiting for Ollama...'; \
        sleep 1; \
    done && \
    ollama pull llama3 && \
    echo "Model pulled successfully" && \
    ollama list > /app/models.txt && \
    cat /app/models.txt && \
    kill "$OLLAMA_PID" 2>/dev/null || true
|
|
|
|
|
# Copy only the dependency manifest first so the pip layer below stays cached
# until requirements.txt itself changes (edits to app.py won't bust it).
COPY requirements.txt .
|
|
|
|
|
# --no-cache-dir keeps pip's download cache out of the image layer (DL3042).
RUN pip install --no-cache-dir -r requirements.txt
|
|
|
|
|
# Application code is copied last — the most frequently changing layer.
COPY app.py .
|
|
|
|
|
# 7860 is the Streamlit port (see CMD's --server.port). EXPOSE is
# documentation only — it does not publish the port; use `docker run -p`.
EXPOSE 7860
|
|
|
|
|
# Runtime configuration for the embedded Ollama server, grouped in one
# instruction. OLLAMA_HOST=0.0.0.0 makes the API listen on all interfaces.
# NOTE(review): OLLAMA_PORT is not a variable Ollama documents (the port is
# normally appended to OLLAMA_HOST as host:port) — confirm whether app.py
# reads it before removing.
ENV OLLAMA_HOST=0.0.0.0 \
    OLLAMA_PORT=11434
|
|
|
|
|
# Start Ollama in the background, wait until its API answers, then exec
# Streamlit so it replaces the shell and receives container signals directly.
# Exec (JSON-array) form avoids an extra implicit `sh -c` wrapper
# (build-check JSONArgsRecommended); the original's `sleep 5` was redundant
# given the readiness loop and has been dropped.
# NOTE(review): two long-lived processes in one container is an anti-pattern —
# if `ollama serve` dies, the container keeps running; consider a supervisor
# or splitting into two services. Also consider adding a non-root USER and a
# HEALTHCHECK against http://localhost:11434.
CMD ["bash", "-c", "ollama serve & until curl -s http://localhost:11434 > /dev/null; do echo 'Waiting for Ollama...'; sleep 1; done && exec streamlit run app.py --server.port 7860 --server.address 0.0.0.0"]