mohammadKa143 committed on
Commit 3adc52b · verified · 1 Parent(s): 8615e34

Update app.py

Files changed (1)
  1. app.py +29 -18
app.py CHANGED
@@ -1,11 +1,11 @@
-from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
+from smolagents import CodeAgent,DuckDuckGoSearchTool, LiteLLMModel,load_tool,tool
 import datetime
 import requests
 import pytz
 import yaml
 from tools.final_answer import FinalAnswerTool
-
-# from Gradio_UI import GradioUI
+import os
+from Gradio_UI import GradioUI
 
 # Below is an example of a tool that does nothing. Amaze us with your creativity !
 @tool
@@ -39,12 +39,30 @@ final_answer = FinalAnswerTool()
 # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
 # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
 
-model = HfApiModel(
-    max_tokens=2096,
-    temperature=0.5,
-    model_id='google/gemma-2b-it',# it is possible that this model may be overloaded
-    custom_role_conversions=None,
-)
+os.environ["GOOGLE_API_KEY"] = "AIzaSyBcJrlnDDdWtjUDiLrisSOPuaAGizCLKO4"
+gemini_api_key = os.environ.get("GOOGLE_API_KEY")
+
+try:
+    # LiteLLM uses 'gemini/' prefix for Google AI Studio models
+    gemini_model = LiteLLMModel(
+        model_id="gemini/gemini-1.5-flash-latest",
+        api_key=gemini_api_key,
+        temperature = 0.5,
+        max_tokens = 2096,
+        custom_role_conversions=None
+    )
+    print("Successfully initialized LiteLLMModel for Gemini 1.5 Flash.")
+
+except Exception as e:
+    print(f"Failed to initialize LiteLLMModel: {e}")
+    gemini_model = None
+
+# model = HfApiModel(
+#     max_tokens=2096,
+#     temperature=0.5,
+#     model_id='google/gemma-2b-it',# it is possible that this model may be overloaded
+#     custom_role_conversions=None,
+# )
 
 
 # Import tool from Hub
@@ -54,7 +72,7 @@ with open("prompts.yaml", 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
 
 agent = CodeAgent(
-    model=model,
+    model=gemini_model,
     tools=[final_answer], ## add your tools here (don't remove final answer)
     max_steps=6,
     verbosity_level=1,
@@ -66,11 +84,4 @@ agent = CodeAgent(
 )
 
 
-# GradioUI(agent).launch()
-try:
-    result = agent.run("What weather is it in Europe/London?") # Or another test message
-    print("Agent Result:", result)
-except Exception as e:
-    print("An error occurred during agent run:", e)
-    import traceback
-    traceback.print_exc() # <--- This will print the full error
+GradioUI(agent).launch()
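
After this commit, app.py wires a Gemini model into the agent roughly as in the condensed sketch below. It is an editorial reading of the new code path, not part of the commit; it assumes smolagents is installed with LiteLLM support, that tools/final_answer.py from this Space is importable, and that GOOGLE_API_KEY is supplied through the environment instead of being hardcoded.

import os
from smolagents import CodeAgent, LiteLLMModel
from tools.final_answer import FinalAnswerTool

# LiteLLM routes the 'gemini/' prefix to Google AI Studio models.
model = LiteLLMModel(
    model_id="gemini/gemini-1.5-flash-latest",
    api_key=os.environ["GOOGLE_API_KEY"],  # assumed to be exported in the environment
    temperature=0.5,
    max_tokens=2096,
)

# final_answer must stay in the tool list; custom tools are added alongside it.
agent = CodeAgent(
    model=model,
    tools=[FinalAnswerTool()],
    max_steps=6,
    verbosity_level=1,
)

# app.py now serves the agent via GradioUI(agent).launch(); a direct run
# like the one below is only a quick local check.
print(agent.run("What time is it in Europe/London right now?"))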