Commit b138c9c (verified) · 1 Parent(s): 4fcc621 · committed by dlaima

Update app.py

Files changed (1):
  1. app.py (+21 -59)
app.py CHANGED
@@ -5,10 +5,9 @@ import gradio as gr
 import requests
 import pandas as pd
 
-import google.generativeai as genai
-from smolagents import CodeAgent, DuckDuckGoSearchTool
+from smolagents import LiteLLMModel, CodeAgent, DuckDuckGoSearchTool
 
-# System prompt used by the agent
+# System prompt for the agent
 SYSTEM_PROMPT = """You are a general AI assistant. I will ask you a question.
 Report your thoughts, and finish your answer with just the answer — no prefixes like "FINAL ANSWER:".
 Your answer should be a number OR as few words as possible OR a comma-separated list of numbers and/or strings.
@@ -17,66 +16,29 @@ If you're asked for a string, don’t use articles or abbreviations (e.g. for ci
 
 DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 
-# Generation result wrapper to match smolagents expectations
-class GenerationResult:
-    def __init__(self, content, input_tokens=0, output_tokens=0, token_usage=None):
-        self.content = content
-        self.input_tokens = input_tokens
-        self.output_tokens = output_tokens
-        self.token_usage = token_usage or {}
-
-# Gemini model wrapper
-class GeminiFlashModel:
-    def __init__(self, model_id="gemini-1.5-flash", api_key=None):
-        genai.configure(api_key=api_key or os.getenv("GEMINI_API_KEY"))
-        self.model = genai.GenerativeModel(model_id)
-        self.system_prompt = SYSTEM_PROMPT
-
-    def generate(self, messages, **kwargs):
-        if not isinstance(messages, list) or not all(isinstance(m, dict) for m in messages):
-            raise TypeError("Expected 'messages' to be a list of dicts")
-
-        # Ensure system prompt is first message
-        if not any(m.get("role") == "system" for m in messages):
-            messages = [{"role": "system", "content": self.system_prompt}] + messages
-
-        # Build prompt text by concatenating messages with roles
-        prompt = ""
-        for m in messages:
-            role = m["role"].capitalize()
-            content = m["content"]
-            prompt += f"{role}: {content}\n"
-
-        try:
-            response = self.model.generate_content(prompt)
-            # Always wrap the result in GenerationResult
-            return GenerationResult(
-                content=response.text.strip(),
-                input_tokens=0,  # Could add token counts here if available
-                output_tokens=0,
-            )
-        except Exception as e:
-            # Wrap errors too, so agent doesn't fail
-            return GenerationResult(
-                content=f"GENERATION ERROR: {e}",
-                input_tokens=0,
-                output_tokens=0,
-            )
-
-# Agent wrapper
+# Agent wrapper using LiteLLMModel
 class MyAgent:
     def __init__(self):
-        self.model = GeminiFlashModel(model_id="gemini-1.5-flash")
-        self.agent = CodeAgent(tools=[DuckDuckGoSearchTool()], model=self.model)
+        gemini_api_key = os.getenv("GEMINI_API_KEY")
+        if not gemini_api_key:
+            raise ValueError("GEMINI_API_KEY not set in environment variables.")
+
+        # Instantiate LiteLLMModel with Gemini API key and model id
+        self.model = LiteLLMModel(
+            model_id="gemini/gemini-2.0-flash-lite",
+            api_key=gemini_api_key,
+            system_prompt=SYSTEM_PROMPT
+        )
+
+        # Create the CodeAgent with optional base tools and DuckDuckGo search
+        self.agent = CodeAgent(
+            tools=[DuckDuckGoSearchTool()],
+            model=self.model,
+            add_base_tools=True,
+        )
 
     def __call__(self, question: str) -> str:
-        result = self.agent.run(question)
-        # result can be GenerationResult or maybe dict or str - normalize:
-        if hasattr(result, "content"):
-            return result.content
-        if isinstance(result, dict):
-            return result.get("content", str(result))
-        return str(result)
+        return self.agent.run(question)
 
 # Main evaluation function
 def run_and_submit_all(profile: gr.OAuthProfile | None):
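
For reference, the rewritten MyAgent can be smoke-tested on its own roughly as follows. This is a minimal sketch, not part of the commit: it assumes GEMINI_API_KEY is already set in the environment (e.g. as a Space secret), that the classes from the diff above are in scope, and the sample question is invented for illustration.

# Minimal sketch, assuming GEMINI_API_KEY is exported and MyAgent (above) is in scope.
agent = MyAgent()                                  # builds the LiteLLMModel + CodeAgent pair
answer = agent("What is the capital of France?")   # hypothetical test question
print(answer)                                      # SYSTEM_PROMPT asks for a bare answer, e.g. "Paris"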