dlaima committed
Commit 86df5d9 · verified · 1 Parent(s): 932b4d5

Update app.py

Files changed (1):
  1. app.py +55 -19
app.py CHANGED
@@ -4,7 +4,8 @@ import os
 import gradio as gr
 import requests
 import pandas as pd
-from openai import OpenAI
+
+import google.generativeai as genai
 from smolagents import CodeAgent, DuckDuckGoSearchTool
 
 # System prompt used by the agent
@@ -16,35 +17,70 @@ If you're asked for a string, don’t use articles or abbreviations (e.g. for ci
 
 DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
+# Generation result wrapper to match smolagents expectations
+class GenerationResult:
+    def __init__(self, content, token_usage=None, input_tokens=0, output_tokens=0):
+        self.content = content
+        self.token_usage = token_usage or {}
+        self.input_tokens = input_tokens
+        self.output_tokens = output_tokens
+
+# Gemini model wrapper
 class GeminiFlashModel:
-    def __init__(self, model_id="gemini-1.5-flash"):
-        self.client = OpenAI(
-            api_key=os.getenv("GEMINI_API_KEY"),
-            base_url="https://generativelanguage.googleapis.com/v1beta/openai/"
-        )
-        self.model_id = model_id
+    def __init__(self, model_id="gemini-1.5-flash", api_key=None):
+        genai.configure(api_key=api_key or os.getenv("GEMINI_API_KEY"))
+        self.model = genai.GenerativeModel(model_id)
         self.system_prompt = SYSTEM_PROMPT
 
-    def generate(self, messages):
-        # Ensure system prompt is present
+    # Accept stop_sequences explicitly to avoid unexpected kwarg errors
+    def generate(self, messages, stop_sequences=None, **kwargs):
+        if not isinstance(messages, list) or not all(isinstance(m, dict) for m in messages):
+            raise TypeError("Expected 'messages' to be a list of dicts")
+
         if not any(m.get("role") == "system" for m in messages):
             messages = [{"role": "system", "content": self.system_prompt}] + messages
 
-        response = self.client.chat.completions.create(
-            model=self.model_id,
-            messages=messages
-        )
-        # Return the generated content string directly
-        return response.choices[0].message.content
+        prompt = ""
+        for m in messages:
+            role = m["role"].capitalize()
+            content = m["content"]
+            prompt += f"{role}: {content}\n"
 
+        try:
+            # Note: genai.GenerativeModel.generate_content may not support stop_sequences
+            response = self.model.generate_content(prompt)
+            return GenerationResult(
+                content=response.text.strip(),
+                token_usage={},  # you can extend if API provides token info
+                input_tokens=0,
+                output_tokens=0
+            )
+        except Exception as e:
+            return GenerationResult(
+                content=f"GENERATION ERROR: {e}",
+                token_usage={},
+                input_tokens=0,
+                output_tokens=0
+            )
+
+# Agent wrapper
 class MyAgent:
     def __init__(self):
-        self.model = GeminiFlashModel()
+        self.model = GeminiFlashModel(model_id="gemini-1.5-flash")
        self.agent = CodeAgent(tools=[DuckDuckGoSearchTool()], model=self.model)
 
     def __call__(self, question: str) -> str:
-        return self.agent.run(question)
-
+        # The agent.run expects a string answer
+        result = self.agent.run(question)
+        # If result is GenerationResult or dict-like, convert to string
+        if hasattr(result, "content"):
+            return result.content
+        elif isinstance(result, dict):
+            return result.get("content", str(result))
+        else:
+            return str(result)
+
+# Main evaluation function
 def run_and_submit_all(profile: gr.OAuthProfile | None):
     space_id = os.getenv("SPACE_ID")
 
@@ -113,6 +149,7 @@ def run_and_submit_all(profile: gr.OAuthProfile | None):
     except Exception as e:
         return f"Submission failed: {e}", pd.DataFrame(results_log)
 
+# Gradio UI setup
 with gr.Blocks() as demo:
     gr.Markdown("# Basic Agent Evaluation Runner")
     gr.Markdown("""
@@ -136,4 +173,3 @@ if __name__ == "__main__":
 
 
 
-
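
For reference, a minimal smoke-test sketch of the new wrapper, separate from the commit itself: it assumes app.py is importable from the working directory, that GEMINI_API_KEY is exported, and that google-generativeai, smolagents, and gradio are installed; the file name and the sample question are hypothetical.

# smoke_test.py (hypothetical helper): exercises GeminiFlashModel.generate()
# directly and prints the fields of the GenerationResult it returns.
import os

from app import GeminiFlashModel  # assumes app.py is on the import path

def main():
    # The wrapper falls back to the environment variable when api_key is not passed.
    if not os.getenv("GEMINI_API_KEY"):
        raise SystemExit("Set GEMINI_API_KEY before running this sketch.")

    model = GeminiFlashModel(model_id="gemini-1.5-flash")

    # generate() takes chat-style messages (a list of role/content dicts);
    # the system prompt is injected automatically when none is supplied.
    messages = [{"role": "user", "content": "What is the capital of France?"}]
    result = model.generate(messages, stop_sequences=["\nUser:"])

    # The answer is carried on the GenerationResult wrapper, not a bare string.
    print("content:", result.content)
    print("tokens:", result.input_tokens, result.output_tokens)

if __name__ == "__main__":
    main()

Calling MyAgent()(question) goes through the same generate() path, with CodeAgent expected to pass messages (and possibly stop_sequences) into the wrapper before the answer is flattened back to a string.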