siyah1 committed
Commit f961f4b · verified · 1 Parent(s): 79d27e8

Update app.py

Files changed (1)
  1. app.py +13 -16
app.py CHANGED
@@ -14,37 +14,34 @@ import io
 
 class GroqLLM:
     """Compatible LLM interface for smolagents CodeAgent"""
-    def __init__(self, model_name="llama-3.1-8B-Instant"):
+    def __init__(self, model_name="llama3-8b-8192"):
         self.client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
         self.model_name = model_name
-
+
     def __call__(self, prompt: Union[str, dict, List[Dict]]) -> str:
-        """Make the class callable as required by smolagents"""
+        """Allows use as callable (legacy compatibility)"""
+        return self.generate(prompt)
+
+    def generate(self, prompt: Union[str, dict, List[Dict]]) -> str:
+        """Generate text completion from Groq API"""
         try:
-            # Handle different prompt formats
             if isinstance(prompt, (dict, list)):
                 prompt_str = str(prompt)
             else:
                 prompt_str = str(prompt)
-
-            # Create a properly formatted message
+
             completion = self.client.chat.completions.create(
                 model=self.model_name,
-                messages=[{
-                    "role": "user",
-                    "content": prompt_str
-                }],
+                messages=[{"role": "user", "content": prompt_str}],
                 temperature=0.7,
                 max_tokens=1024,
                 stream=False
             )
-
-            return completion.choices[0].message.content if completion.choices else "Error: No response generated"
-
+
+            return completion.choices[0].message.content.strip() if completion.choices else "Error: No response generated"
         except Exception as e:
-            error_msg = f"Error generating response: {str(e)}"
-            print(error_msg)
-            return error_msg
+            return f"Error generating response: {str(e)}"
+
 
 class DataAnalysisAgent(CodeAgent):
     """Extended CodeAgent with dataset awareness"""