fdaudens HF Staff commited on
Commit
0ba8921
·
verified ·
1 Parent(s): 1f49f4f

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +225 -0
app.py ADDED
@@ -0,0 +1,225 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import os
3
+ import json
4
+ import time
5
+ from typing import Any, Dict, List
6
+ from openai import OpenAI
7
+ from dotenv import load_dotenv
# Pull environment variables (notably HF_TOKEN) from a local .env file if one exists.
load_dotenv()

HF_TOKEN = os.getenv("HF_TOKEN")

# Models the app can route requests to through the Hugging Face inference router.
AVAILABLE_MODELS = [
    "openai/gpt-oss-120b:fireworks-ai",
    "openai/gpt-oss-20b:fireworks-ai",
]

# Model used when the caller does not choose one explicitly.
DEFAULT_MODEL = "openai/gpt-oss-120b:fireworks-ai"
BASE_URL = "https://router.huggingface.co/v1"

# OpenAI-compatible client pointed at the HF router endpoint.
client = OpenAI(base_url=BASE_URL, api_key=HF_TOKEN)
# Tool schemas advertised to the model (OpenAI function-calling format).
# Only one tool is exposed: a city-keyed weather lookup.
TOOLS = [
    {
        "type": "function",
        "function": {
            "name": "get_weather",
            "description": "Get the current weather information for a specified city",
            "parameters": {
                "type": "object",
                "properties": {
                    "city": {
                        "type": "string",
                        "description": "The name of the city to get weather information for",
                    },
                },
                "required": ["city"],
            },
        },
    },
]
def get_weather(city: str):
    """Mock weather lookup: always reports sunny conditions for *city*."""
    # Trace tool invocations on stdout for debugging.
    print(f"[debug] getting weather for {city}")
    report = f"The weather in {city} is sunny."
    return report
# Registry mapping tool names (as advertised in TOOLS) to their Python callables.
FUNCTION_MAP = dict(get_weather=get_weather)
def call_model(messages: List[Dict[str, str]], tools=None, temperature: float = 0.3, model: str = DEFAULT_MODEL):
    """Perform a single chat-completion call with tool calling enabled.

    Args:
        messages: Conversation so far as OpenAI-style role/content dicts.
        tools: Tool specs to expose; falls back to the module-level TOOLS.
        temperature: Sampling temperature for the completion.
        model: Model identifier routed through the HF router.

    Returns:
        The raw chat-completion response object.

    Raises:
        Re-raises any error from the underlying API call after logging it.
    """
    active_tools = TOOLS if tools is None else tools
    try:
        return client.chat.completions.create(
            model=model,
            temperature=temperature,
            messages=messages,
            tools=active_tools,
            tool_choice="auto",
        )
    except Exception as e:
        print(f"Error calling model: {e}")
        raise
def run_weather_agent(user_prompt: str, model: str = DEFAULT_MODEL) -> str:
    """Drive a short tool-calling loop for weather questions.

    The model may request the get_weather tool; each tool result is fed
    back into the conversation so the model can compose a final answer.
    At most three model round trips are made before giving up.

    Args:
        user_prompt: The user's question.
        model: Model identifier to use for every round trip.

    Returns:
        The model's final text answer, or an error/fallback message.
    """
    conversation: List[Dict[str, str]] = [
        {
            "role": "system",
            "content": (
                "You are a helpful weather agent. Follow these steps:\n"
                "1. When a user asks about weather in a city, use get_weather tool\n"
                "2. Provide a friendly response with the weather information\n"
                "3. If no city is mentioned, ask the user to specify a city\n"
                "4. Be conversational and helpful\n"
            ),
        },
        {"role": "user", "content": user_prompt},
    ]

    for attempt in range(3):  # small safety cap on round trips
        try:
            reply = call_model(conversation, tools=TOOLS, model=model).choices[0].message

            tool_calls = getattr(reply, "tool_calls", None)
            if tool_calls:
                # Record the assistant turn, including its tool requests,
                # so the follow-up completion sees the full exchange.
                conversation.append({
                    "role": "assistant",
                    "content": reply.content or "",
                    "tool_calls": [
                        {
                            "id": tc.id,
                            "type": "function",
                            "function": {
                                "name": tc.function.name,
                                "arguments": tc.function.arguments,
                            },
                        }
                        for tc in tool_calls
                    ],
                })
                # Execute every requested tool and append its "tool" reply.
                for tc in tool_calls:
                    conversation.append(_run_tool_call(tc))
                # Loop again so the model can read the tool outputs.
                continue

            # Plain text answer with no tool requests: we're done.
            if reply.content:
                return reply.content

            # Empty reply: brief pause before retrying.
            time.sleep(0.2)

        except Exception as e:
            if attempt == 2:  # last attempt — surface the error to the caller
                return f"Error occurred during processing: {e}"
            time.sleep(0.5)
            continue

    return "I could not complete the task within the step limit. Try refining your query."


def _run_tool_call(tc) -> Dict[str, str]:
    """Execute one model-requested tool call and build the 'tool' role reply.

    Unknown tools and argument/runtime failures are reported as JSON error
    payloads rather than raised, so the loop can keep going. Serialization
    of the tool result itself is intentionally left outside the try block
    so a non-JSON-serializable result still propagates to the caller.
    """
    name = tc.function.name
    try:
        args = json.loads(tc.function.arguments or "{}")
    except json.JSONDecodeError:
        args = {}

    fn = FUNCTION_MAP.get(name)
    if not fn:
        result = {"ok": False, "error": "unknown_tool"}
    else:
        try:
            result = fn(**args)
        except TypeError as e:
            result = {"ok": False, "error": f"bad_args: {e}"}
        except Exception as e:
            result = {"ok": False, "error": repr(e)}

    return {
        "role": "tool",
        "tool_call_id": tc.id,
        "name": name,
        "content": json.dumps(result),
    }
165
+ # Example usage of the weather agent
166
+ # if __name__ == "__main__":
167
+ # # Test the weather agent with different queries
168
+ # test_queries = [
169
+ # "What's the weather like in New York?",
170
+ # "How's the weather in London?",
171
+ # "Tell me about the weather in Tokyo",
172
+ # "What's the weather like?" # This should prompt for a city
173
+ # ]
174
+
175
+ # print("=== Weather Agent Demo ===\n")
176
+
177
+ # for query in test_queries:
178
+ # print(f"User: {query}")
179
+ # try:
180
+ # response = run_weather_agent(query)
181
+ # print(f"Agent: {response}\n")
182
+ # except Exception as e:
183
+ # print(f"Error: {e}\n")
184
+ # print("-" * 50 + "\n")
185
+
186
+
### GRADIO
def weather_chat_with_agent(message, history, model):
    """Append one user/assistant exchange to *history* and return it.

    Blank (whitespace-only) input leaves the history untouched. Agent
    failures are surfaced as an apologetic assistant message instead of
    raising, so the UI never breaks on an error.
    """
    if not message.strip():
        return history

    try:
        reply = run_weather_agent(message, model)
    except Exception as e:
        reply = f"Sorry, I encountered an error: {str(e)}"

    history.append({"role": "user", "content": message})
    history.append({"role": "assistant", "content": reply})
    return history
def create_weather_interface():
    """Build and return the Gradio Blocks UI for the weather agent."""
    with gr.Blocks(title="Weather Agent") as demo:
        gr.Markdown("# 🌤️ Weather Agent")
        gr.Markdown("Ask me about the weather in any city!")

        chatbot = gr.Chatbot(height=400, type="messages")
        msg = gr.Textbox(label="Ask about weather", placeholder="e.g., What's the weather like in Paris?")
        clear = gr.Button("Clear")

        # Route each submission through the agent with the default model.
        msg.submit(
            lambda message, chat_history: weather_chat_with_agent(message, chat_history, DEFAULT_MODEL),
            [msg, chatbot],
            [chatbot],
        )
        clear.click(lambda: None, None, chatbot, queue=False)

    return demo
# Build the UI at import time: Hugging Face Spaces (and other hosts that
# import this module) look for a module-level `demo` object.
demo = create_weather_interface()

if __name__ == "__main__":
    # Launch only when executed directly, so importing this module for
    # tests or embedding does not start a server as a side effect.
    demo.launch()