#!/bin/bash
# Configure the Ollama server: listen on all interfaces and accept
# cross-origin requests from https://projects.blender.org
export OLLAMA_HOST=0.0.0.0
export OLLAMA_ORIGINS=https://projects.blender.org
# Start the Ollama service in the background
ollama serve &
# Wait for the service to initialize
sleep 10
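# A fixed sleep can be fragile; as an alternative (a sketch, not part of the
# original script), the server's /api/version endpoint could be polled until
# it responds before continuing:
# until curl -sf http://127.0.0.1:11434/api/version > /dev/null; do
#     sleep 1
# done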
# Pull the model from Hugging Face and load it into the server
# (ollama run downloads the model on first use)
ollama run hf.co/mano-wii/llama3.2-3B-tunned-for-blender:Q5_K_M
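# Once the model is available, clients can query it through Ollama's HTTP API.
# A minimal example request (for illustration only; the prompt text is made up):
# curl http://127.0.0.1:11434/api/generate -d '{
#     "model": "hf.co/mano-wii/llama3.2-3B-tunned-for-blender:Q5_K_M",
#     "prompt": "How do I add a subdivision surface modifier in Blender?",
#     "stream": false
# }'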
# Keep the container running indefinitely
tail -f /dev/null
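# Note: tail keeps the container alive even if the server process dies. If the
# container should stop when ollama serve exits, waiting on the background
# process is an alternative to the tail command above (a suggestion, not part
# of the original script):
# wait $!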