chatCPU / run.sh
SkyNetWalker's picture
Update run.sh
f148260 verified
raw
history blame
493 Bytes
#!/bin/bash
# Launch the Ollama API server in the background, wait until its HTTP
# endpoint is reachable, then start the Gradio front-end that talks to it.
set -euo pipefail

# Start the Ollama server in the background.
# Capture its PID so we can detect an early crash while waiting.
ollama serve &
ollama_pid=$!

# Poll the Ollama API endpoint until it responds. Bound the number of
# attempts so a broken install fails fast instead of hanging forever.
echo "Waiting for Ollama to start..."
max_attempts=60
attempt=0
until curl -s http://localhost:11434 > /dev/null; do
  # If the background server already died, waiting is pointless.
  if ! kill -0 "$ollama_pid" 2>/dev/null; then
    echo "Ollama server exited before becoming ready." >&2
    exit 1
  fi
  attempt=$((attempt + 1))
  if (( attempt >= max_attempts )); then
    echo "Timed out waiting for Ollama after ${max_attempts}s." >&2
    exit 1
  fi
  sleep 1
done
echo "Ollama started."

# Run the Gradio application (it talks to the local Ollama server).
# 'exec' replaces this shell so the app receives container signals
# (e.g. SIGTERM) directly for a clean shutdown.
exec python app.py