yangminded committed
Commit 8c76ace · verified · Parent(s): a73350f

Update app.py

Files changed (1):
  1. app.py (+36, -3)
app.py CHANGED
@@ -4,6 +4,9 @@ import requests
 import inspect
 import pandas as pd
 
+from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
+from tools.final_answer import FinalAnswerTool
+
 # (Keep Constants as is)
 # --- Constants ---
 DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
@@ -11,13 +14,43 @@ DEFAULT_API_URL = "https://agents-course-unit4-scoring.hf.space"
 # --- Basic Agent Definition ---
 # ----- THIS IS WERE YOU CAN BUILD WHAT YOU WANT ------
 class BasicAgent:
+    agent = None  # placeholder; the CodeAgent instance is assigned in __init__
+
     def __init__(self):
+        final_answer = FinalAnswerTool()
+
+        # If the agent does not answer, the model is overloaded; use another model or the following Hugging Face endpoint, which also serves Qwen2.5 Coder:
+        # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
+
+        model = HfApiModel(
+            max_tokens=2096,
+            temperature=0.5,
+            model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud',  # alternative: 'Qwen/Qwen2.5-Coder-32B-Instruct', which may be overloaded
+            custom_role_conversions=None,
+        )
+
+        with open("prompts.yaml", 'r') as stream:
+            prompt_templates = yaml.safe_load(stream)
+
+        self.agent = CodeAgent(
+            model=model,
+            tools=[final_answer],  # add your tools here (don't remove final_answer)
+            max_steps=6,
+            verbosity_level=1,
+            grammar=None,
+            planning_interval=None,
+            name=None,
+            description=None,
+            prompt_templates=prompt_templates
+        )
+
         print("BasicAgent initialized.")
+
     def __call__(self, question: str) -> str:
         print(f"Agent received question (first 50 chars): {question[:50]}...")
-        fixed_answer = "This is a default answer."
-        print(f"Agent returning fixed answer: {fixed_answer}")
-        return fixed_answer
+        answer = self.agent.run(question)
+        print(f"Agent returning answer: {answer}")
+        return answer
 
 def run_and_submit_all( profile: gr.OAuthProfile | None):
     """