# Streamlit app: minimal chat helper that sends prompts to a Llama model
# hosted on Together AI. (Header cleaned up from a web-page copy/paste.)
import os

import streamlit as st
from together import Together

# Together AI client; requires the TOGETHER_API_KEY environment variable.
# Raises KeyError at import time if the key is missing, which fails fast
# rather than at the first request.
client = Together(api_key=os.environ["TOGETHER_API_KEY"])
def call_llama(prompt: str) -> str:
    """Send a prompt to the Llama model and return its reply.

    Args:
        prompt: The input prompt to send as a single user message.

    Returns:
        The text content of the model's first response choice.
    """
    # Issue a chat-completion request with the prompt as one user turn.
    response = client.chat.completions.create(
        model="meta-llama/Llama-3-8b-chat-hf",
        messages=[
            {
                "role": "user",
                "content": prompt,
            }
        ],
    )
    # The API returns a list of choices; only the first is used here.
    return response.choices[0].message.content