import streamlit as st
import os
from together import Together


client = Together(api_key=os.environ["TOGETHER_API_KEY"])


def call_llama(prompt: str) -> str:
    """
        Send a prompt to the Llama model and return the response.
        Args:
            prompt (str): The input prompt to send to the Llama model.
        Returns:
            str: The response from the Llama model.
    """

    # Send the prompt to the Llama-3-8b-chat-hf model as a single user message
    response = client.chat.completions.create(
        model="meta-llama/Llama-3-8b-chat-hf",
        messages=[
            {
                "role": "user",
                "content": prompt,
            }
        ],
    )

    # Return the content of the first response message
    return response.choices[0].message.content
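

# Example usage: a minimal Streamlit front end for call_llama. This is an
# illustrative sketch; the widget labels and layout are assumptions, not part
# of the original module. Run the file with `streamlit run`.
prompt = st.text_area("Enter a prompt for Llama 3")
if st.button("Generate") and prompt:
    # Display the model's reply directly in the app
    st.write(call_llama(prompt))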