import os
from smolagents import (
    CodeAgent, 
    DuckDuckGoSearchTool,
    VisitWebpageTool, 
    PythonInterpreterTool,
    InferenceClientModel,
    OpenAIServerModel,
    # HfApiModel, # import bug from smolagents after adding duckduckgo-search in requirements
    tool
)
from typing import List, Dict, Any, Optional



class QAgent:
    def __init__(
        self,
        model_type: str = "InferenceClientModel",
        model_id: Optional[str] = None,
        api_key: Optional[str] = None,
        provider: Optional[str] = None,  # for InferenceClientModel
        timeout: Optional[int] = None,   # for InferenceClientModel
        system_prompt: Optional[str] = None,
        temperature: float = 0.2,
        verbose: bool = False # Verbose logging or not
    ):
        """ 
        QAgent description 
        """

        self.verbose = verbose
        self.system_prompt = system_prompt


        # if model_type == "HfApiModel":
        #     if api_key is None:
        #         api_key = os.getenv("Q_NEB_TOK")
        #         if not api_key:
        #             raise ValueError("No API Key found for HuggingFace. Please set Q_NEB_TOK or pass api_key.")
        #     
        #     if self.verbose:
        #         print(f"Using Hugging Face token: {api_key[:5]}... (HfApiModel mode)")
        #         
        #     self.model = HfApiModel(
        #         model_id=model_id or "Qwen/Qwen2.5-Coder-32B-Instruct", # previously: or "meta-llama/Llama-3-70B-Instruct",
        #         token=api_key,
        #         temperature=temperature
        #     )
        # el
        if model_type == "InferenceClientModel":
            if api_key is None:
                api_key = os.getenv("Q_NEB_TOK")
                if not api_key:
                    raise ValueError("No API Key found for HuggingFace. Please set SP_HF_TOK or pass api_key.")
            
            if self.verbose:
                print(f"Using Hugging Face token: {api_key[:5]}... (InferenceClientModel mode)")
                
            self.model = InferenceClientModel(
                model_id=model_id or "Qwen/Qwen2.5-Coder-32B-Instruct", # précédemment : or "meta-llama/Llama-3-70B-Instruct",
                provider=provider or "nebius", # or "hf-inference",
                token=api_key,
                timeout=timeout or 120,
                temperature=temperature
            )
        elif model_type == "OpenAIServerModel":
            print(f"Trying to configure OpenAIServerModel.")
            # Check for xAI API key and base URL first
            xai_api_key = os.getenv("XAI_API_KEY")
            # xai_api_base = os.getenv("XAI_API_BASE")  # not sure what this is used for
            
            # If xAI credentials are available, use them
            if xai_api_key and api_key is None:
                api_key = xai_api_key
                if self.verbose:
                    print(f"Using xAI API key: {api_key[:5]}...")
            
            # If no API key specified, fall back to OPENAI_API_KEY
            if api_key is None:
                api_key = os.getenv("Q_OAI_TOK")
                if not api_key:
                    raise ValueError("No OpenAI API key provided. Please set Q_OAI_TOK or XAI_API_KEY environment variable or pass api_key parameter.")
            
            self.model = OpenAIServerModel(
                model_id=model_id or "gpt-4o",
                api_key=api_key,
                # api_base=api_base,
                temperature=temperature
            )
        else:
            raise ValueError(f"Unknown model type: {model_type}")
        
        if self.verbose:
            print(f"Model initialized: {model_type} - {self.model.model_id} - prov: {self.model.provider}")

        # Initialize tools
        self.tools = [
            DuckDuckGoSearchTool(),
            PythonInterpreterTool(),
            # save_and_read_file,
            # download_file_from_url,
            # analyze_csv_file,
            # analyze_excel_file
        ]

        # Extra modules the agent's Python interpreter may use (intended for additional_authorized_imports, currently disabled below)
        self.imports = ["pandas", "numpy", "datetime", "json", "re", "math", "os", "requests", "csv", "urllib"]

        # Create CodeAgent
        self.agent = CodeAgent(
            tools=self.tools,
            model=self.model,
            # additional_authorized_imports=self.imports,
            # executor_type=executor_type,
            # executor_kwargs=executor_kwargs,
            verbosity_level=2 if self.verbose else 0
        )
        
        if self.verbose:
            print("CodeAgent initialized")

    def invoke(self, prompt: str) -> str:
        """Run the agent on a single prompt and return its final answer."""
        if self.verbose:
            print(f"Agent invoked with prompt: {prompt[:80]}...")

        result = self.agent.run(prompt)

        if self.verbose:
            print(result)

        return result
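

# --- Minimal usage sketch (not part of the class) ---
# Assumes a valid Hugging Face token in the Q_NEB_TOK environment variable,
# as required by the InferenceClientModel branch above; the model_id and
# provider below simply restate the defaults used in __init__.
if __name__ == "__main__":
    agent = QAgent(
        model_type="InferenceClientModel",
        model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
        provider="nebius",
        verbose=True,
    )
    answer = agent.invoke("What is the capital of France?")
    print(f"Final answer: {answer}")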