tonko22 committed on
Commit
a8e40a3
·
1 Parent(s): 23f553c

Bugfix: remove openrouter API

Browse files
Files changed (3) hide show
  1. config.py +1 -1
  2. pyproject.toml +1 -1
  3. run_single_agent.py +8 -4
config.py CHANGED
@@ -24,7 +24,7 @@ def load_api_keys():
24
  """Load API keys from environment variables."""
25
  # Gemini API is the default
26
  os.environ["GEMINI_API_KEY"] = os.getenv("GEMINI_API_KEY")
27
- os.environ["OPENROUTER_API_KEY"] = os.getenv("OPENROUTER_API_KEY")
28
 
29
  def get_model_id(use_local=True, provider="ollama"):
30
  """Get the appropriate model ID based on configuration.
 
24
  """Load API keys from environment variables."""
25
  # Gemini API is the default
26
  os.environ["GEMINI_API_KEY"] = os.getenv("GEMINI_API_KEY")
27
+
28
 
29
  def get_model_id(use_local=True, provider="ollama"):
30
  """Get the appropriate model ID based on configuration.
pyproject.toml CHANGED
@@ -10,6 +10,6 @@ dependencies = [
10
  "litellm>=1.61.20",
11
  "loguru>=0.7.3",
12
  "pyyaml>=6.0.2",
13
- "smolagents>=1.9.2",
14
  "tenacity>=9.0.0",
15
  ]
 
10
  "litellm>=1.61.20",
11
  "loguru>=0.7.3",
12
  "pyyaml>=6.0.2",
13
+ "smolagents[mcp]>=1.9.2",
14
  "tenacity>=9.0.0",
15
  ]
run_single_agent.py CHANGED
@@ -4,7 +4,7 @@ from smolagents import LiteLLMModel
4
 
5
  from agents.single_agent import create_single_agent
6
  from loguru import logger
7
- from config import get_model_id, get_ollama_api_base, setup_logger, load_api_keys
8
 
9
  setup_logger()
10
  load_api_keys()
@@ -12,13 +12,17 @@ load_api_keys()
12
  # Set environment variables for API keys if needed
13
  os.environ["GEMINI_API_KEY"] = str(os.getenv("GEMINI_API_KEY"))
14
 
15
-
16
  use_local = False
17
 
18
  # If using Ollama, we need to specify the API base URL
19
  # Initialize the LLM model based on configuration
20
- model_id = "openrouter/google/gemini-2.0-flash-lite-preview-02-05:free"
 
 
 
 
21
  logger.info(f"Initializing with model: {model_id}")
 
22
  if use_local:
23
  api_base = get_ollama_api_base()
24
  logger.info(f"Using Ollama API base: {api_base}")
@@ -30,7 +34,7 @@ else:
30
  # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
31
 
32
  # Prompt the user for the song name
33
- song_data = "RCHP - On Mercury"
34
 
35
  agent = create_single_agent(model)
36
 
 
4
 
5
  from agents.single_agent import create_single_agent
6
  from loguru import logger
7
+ from config import get_ollama_api_base, setup_logger, load_api_keys, get_model_id
8
 
9
  setup_logger()
10
  load_api_keys()
 
12
  # Set environment variables for API keys if needed
13
  os.environ["GEMINI_API_KEY"] = str(os.getenv("GEMINI_API_KEY"))
14
 
 
15
  use_local = False
16
 
17
  # If using Ollama, we need to specify the API base URL
18
  # Initialize the LLM model based on configuration
19
+ if use_local:
20
+ model_id = "openrouter/google/gemini-2.0-flash-lite-preview-02-05:free"
21
+ else:
22
+ model_id = get_model_id(use_local=use_local)
23
+
24
  logger.info(f"Initializing with model: {model_id}")
25
+
26
  if use_local:
27
  api_base = get_ollama_api_base()
28
  logger.info(f"Using Ollama API base: {api_base}")
 
34
  # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
35
 
36
  # Prompt the user for the song name
37
+ song_data = "John Frusciante - Crowded"
38
 
39
  agent = create_single_agent(model)
40