import os

from smolagents import (
    CodeAgent,
    InferenceClientModel,  # current name of the Inference API model class (replaces HfApiModel)
    FinalAnswerTool,
)

class newAgent:
    """Adapts smolagents.CodeAgent to the HF course template API."""
    def __init__(self):
        model_id = "Qwen/Qwen2.5-Coder-32B-Instruct"
        hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
        if not hf_token:
            raise RuntimeError("HUGGINGFACEHUB_API_TOKEN not set in Space secrets")
            
        # Stored on the instance so __call__ can prepend it to every question:
        # CodeAgent builds its own system prompt, so these answer-format rules are
        # injected into the task text rather than replacing the agent's prompt.
        self.system_prompt = (
            "You are an agent that answers exam questions. "
            "Your answers should contain exactly what is asked for in the question. "
            "Be exact and concise in your answers. "
            "Do not add explanations or additional information. "
            "If asked for a list, provide ONLY the items requested, separated by commas."
        )

        model = InferenceClientModel(model_id=model_id, token=hf_token)

        tools = [FinalAnswerTool()]
        self.agent = CodeAgent(
            tools=tools,
            model=model,
            add_base_tools=True,
            max_steps=3,
        )

    def __call__(self, question: str) -> str:
        """ONE question in → ONE pure-text answer out."""
        task = f"{self.system_prompt}\n\nQuestion: {question}"
        result = self.agent.run(task)
        return str(result)  # run() may return a non-string object; coerce it for the grader
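

# A minimal local smoke test, assuming the class is run outside the course's Gradio app.
# The question below is made up for illustration; the graded run supplies the real
# questions through the Space's app.py.
if __name__ == "__main__":
    agent = newAgent()
    sample_question = "What is the capital of France?"  # hypothetical example question
    print(agent(sample_question))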