import os
from typing import Optional

from smolagents import (
    CodeAgent,
    DuckDuckGoSearchTool,
    VisitWebpageTool,
    InferenceClientModel,
    HfApiModel,
    PythonInterpreterTool,
    tool,
)



class QAgent:
    def __init__(
        self,
        model_type: str = "InferenceClientModel",
        model_id: Optional[str] = None,
        api_key: Optional[str] = None,
        provider: Optional[str] = None,  # only used by InferenceClientModel
        timeout: Optional[int] = None,   # only used by InferenceClientModel
        system_prompt: Optional[str] = None,
        verbose: bool = False            # enable verbose logging
    ):
        """
        Question-answering agent built on a smolagents CodeAgent.

        Depending on `model_type`, the underlying model is either an
        HfApiModel or an InferenceClientModel backed by the Hugging Face
        Inference API.
        """

        self.verbose = verbose
        self.system_prompt = system_prompt


        if model_type == "HfApiModel":
            if api_key is None:
                api_key = os.getenv("SP_HF_TOK")
                if not api_key:
                    raise ValueError("No API Key found for HuggingFace. Please set SP_HF_TOK or pass api_key.")
            
            if self.verbose:
                print(f"Using Hugging Face token: {api_key[:5]}... (HfApiModel mode)")
                
            self.model = HfApiModel(
                model_id=model_id or "Qwen/Qwen2.5-Coder-32B-Instruct",  # previously: "meta-llama/Llama-3-70B-Instruct"
                token=api_key
                # temperature=temperature
            )
        elif model_type == "InferenceClientModel":
            if api_key is None:
                api_key = os.getenv("SP_HF_TOK")
                if not api_key:
                    raise ValueError("No API Key found for HuggingFace. Please set SP_HF_TOK or pass api_key.")
            
            if self.verbose:
                print(f"Using Hugging Face token: {api_key[:5]}... (InferenceClientModel mode)")
                
            self.model = InferenceClientModel(
                model_id=model_id or "Qwen/Qwen2.5-Coder-32B-Instruct",  # previously: "meta-llama/Llama-3-70B-Instruct"
                provider=provider or "hf-inference",
                token=api_key,
                timeout=timeout or 120
                # temperature=temperature
            )
        else:
            raise ValueError(f"Unknown model type: {model_type}")
        
        if self.verbose:
            # HfApiModel may not expose a provider attribute, so fall back gracefully.
            print(f"Model initialized: {model_type} - {self.model.model_id} - provider: {getattr(self.model, 'provider', 'n/a')}")

        # Initialize tools
        self.tools = [
            DuckDuckGoSearchTool(),
            PythonInterpreterTool(),
            # save_and_read_file,
            # download_file_from_url,
            # analyze_csv_file,
            # analyze_excel_file
        ]

        # Setup imports
        self.imports = ["pandas", "numpy", "datetime", "json", "re", "math", "os", "requests", "csv", "urllib"]

        # Create CodeAgent
        self.agent = CodeAgent(
            tools=self.tools,
            model=self.model,
            # additional_authorized_imports=self.imports,
            # executor_type=executor_type,
            # executor_kwargs=executor_kwargs,
            verbosity_level=2 if self.verbose else 0
        )
        
        if self.verbose:
            print("CodeAgent initialized")

    def invoke(self, prompt: str) -> str:
        """Run the agent on a prompt and return its final answer."""
        if self.verbose:
            print(f"Agent invoked with prompt: {prompt[:80]}...")

        result = self.agent.run(prompt)

        if self.verbose:
            print(result)

        return result
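

# Minimal usage sketch (assumptions: the SP_HF_TOK environment variable holds a
# valid Hugging Face token, and the default Qwen/Qwen2.5-Coder-32B-Instruct model
# is reachable through the selected provider).
if __name__ == "__main__":
    # Build an agent against the Hugging Face Inference API and ask a simple question.
    agent = QAgent(
        model_type="InferenceClientModel",
        verbose=True,
    )
    answer = agent.invoke("What is the capital of France?")
    print(f"Final answer: {answer}")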