#!/bin/bash

# Start the Ollama API server in the background.
# 'ollama serve' listens on http://localhost:11434 by default.
ollama serve &

# Wait for the Ollama server to become available by polling its
# root endpoint once per second until it responds.
echo "Waiting for Ollama to start..."
while ! curl -s http://localhost:11434 > /dev/null; do
  sleep 1
done
echo "Ollama started."

# Run the Gradio application.
# The Gradio app will then communicate with the Ollama server running locally.
python app.py