axadishaq committed on
Commit
cdf597f
·
verified ·
1 Parent(s): 1dbfd5d

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +110 -0
app.py ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

import gradio as gr
from groq import Groq

# Resolve the Groq API key. Inside Colab it lives in the Secrets store;
# anywhere else (e.g. a Hugging Face Space or a local run) google.colab is
# not importable, so fall back to the environment variable of the same name.
try:
    from google.colab import userdata  # only available inside Colab

    groq_api_key = userdata.get("GROQ_API_KEY")
except ImportError:
    groq_api_key = os.environ.get("GROQ_API_KEY")

if not groq_api_key:
    raise ValueError(
        "GROQ_API_KEY not found. Add it to Colab Secrets or set the "
        "GROQ_API_KEY environment variable."
    )

client = Groq(api_key=groq_api_key)

# IMPORTANT: Check Groq's model deprecation page (https://console.groq.com/docs/deprecations)
# for the latest available and recommended models.
# As of July 2025, I'll use a likely available one.
# You might need to update this model_name if it changes on Groq's side.
# Recommended current models: "llama-3.1-8b-instant" or "llama-3.3-70b-versatile"
DEFAULT_GROQ_MODEL = "llama-3.1-8b-instant"
20
+
21
def get_groq_response(messages: list, model_name: str = DEFAULT_GROQ_MODEL, temperature: float = 0.7):
    """Request a single (non-streaming) chat completion from Groq.

    Args:
        messages (list): Message dicts, e.g. [{"role": "user", "content": "Hello"}].
        model_name (str): Groq model identifier to query.
        temperature (float): Sampling temperature; higher means more random output.

    Returns:
        str: The assistant's reply text, or a fixed error string if the API
        call (or reading the response) fails.
    """
    try:
        # The whole call-and-extract path stays inside the try so any failure
        # (network, auth, malformed response) produces the same fallback string.
        completion = client.chat.completions.create(
            model=model_name,
            messages=messages,
            temperature=temperature,
            max_tokens=1024,  # Adjust as needed
            stream=False,
        )
        return completion.choices[0].message.content
    except Exception as e:
        print(f"Error calling Groq API: {e}")
        return "An error occurred while generating a response. Please check your API key and Groq console."
43
+
44
+
45
+ # --- Simple Conversational Agent Class ---
46
class SimpleGroqAgent:
    """Minimal multi-turn chat agent.

    Keeps its own message history (system + user + assistant turns) and
    delegates text generation to the module-level get_groq_response().
    """

    # Single source of truth for the agent's persona. Previously this literal
    # was duplicated in __init__ and reset_conversation, which could drift.
    _SYSTEM_PROMPT = "You are a helpful and concise AI assistant powered by Groq. You respond quickly."

    def __init__(self, model_name: str = DEFAULT_GROQ_MODEL, temperature: float = 0.7):
        self.model_name = model_name
        self.temperature = temperature
        # History always starts with the persona-defining system message.
        self.conversation_history = [{"role": "system", "content": self._SYSTEM_PROMPT}]

    def chat(self, user_input: str) -> str:
        """Send user_input to the model and return the assistant's reply.

        Both the user turn and the assistant turn are appended to
        conversation_history, so subsequent calls carry full context.
        """
        self.conversation_history.append({"role": "user", "content": user_input})

        # Query Groq with the complete history accumulated so far.
        response_content = get_groq_response(
            self.conversation_history, self.model_name, self.temperature
        )

        self.conversation_history.append({"role": "assistant", "content": response_content})
        return response_content

    def reset_conversation(self):
        """Discard all turns and restart from the system message alone."""
        self.conversation_history = [{"role": "system", "content": self._SYSTEM_PROMPT}]
71
+
72
# --- Gradio Interface Setup ---

# Module-level agent instance shared by every Gradio request; note its
# conversation_history therefore accumulates across all sessions/users.
groq_agent = SimpleGroqAgent()
77
def chat_interface_function(message, history):
    """Adapter between gr.ChatInterface and SimpleGroqAgent.

    Gradio supplies the visible chat transcript as 'history'
    ([user, bot] pairs), but the LLM-side history is tracked inside
    groq_agent itself, so only the newest message is forwarded.
    """
    return groq_agent.chat(message)
88
+
89
# Assemble the Gradio chat UI around the adapter function.
# (In Colab, launching with share=True would generate a public URL.)
demo = gr.ChatInterface(
    fn=chat_interface_function,
    title="⚡️ Groq-Powered AI Assistant in Colab ⚡️",
    description=f"Ask a question and get a lightning-fast response from Groq's {DEFAULT_GROQ_MODEL} LLM!",
    theme="soft",
    chatbot=gr.Chatbot(height=300),
    textbox=gr.Textbox(placeholder="Ask me anything, powered by Groq!", container=False, scale=7),
    examples=[
        "Tell me a short, funny story.",
        "Explain the concept of neural networks in simple terms.",
        "What are the main components of a computer?",
        "Write a creative short paragraph about a futuristic city.",
    ],
)

# Serve the UI; in Colab this also renders inline in the cell output and
# prints a clickable URL.
print(f"Launching Gradio demo with model: {DEFAULT_GROQ_MODEL}")
demo.launch()