ollama_server / run_ollama.py
broadfield-dev's picture
Create run_ollama.py
22e9f7d verified
raw
history blame
374 Bytes
import ollama
from huggingface_hub import login
def run_ollama(prompt, token=""):
    """Generate a response to *prompt* from a local Ollama model.

    Args:
        prompt: The text prompt to send to the model (previously this
            argument was ignored and a hard-coded question was used).
        token: Optional Hugging Face access token. Login is skipped when
            empty -- login(token="") would fail rather than be a no-op.

    Returns:
        The generated response text.
    """
    # Only authenticate with Hugging Face when a token was actually supplied.
    if token:
        login(token=token)
    # The ollama Python client exposes generate()/chat(); there is no
    # load_model()/run() API as the original code assumed. generate()
    # returns a mapping with the text under the "response" key.
    result = ollama.generate(model="tiny_llama", prompt=prompt)
    response = result["response"]
    print(response)
    return response
if __name__ == "__main__":
    # run_ollama() has a required `prompt` parameter, so the original bare
    # call run_ollama() raised TypeError; supply the prompt explicitly.
    response = run_ollama("Why is the sky blue")
    print(response)