import os
import requests
import urllib.parse
from bs4 import BeautifulSoup

class BaseModel:
    """Abstract interface that every LLM backend must implement."""

    def answer(self, prompt: str) -> str:
        raise NotImplementedError("Model must implement the answer method.")

class HfApiModel(BaseModel):
    """Queries a hosted model through the Hugging Face Inference API."""

    def __init__(self, model_name: str, api_token: str):
        self.model_name = model_name
        self.api_token = api_token

    def answer(self, prompt: str) -> str:
        url = f"https://api-inference.huggingface.co/models/{self.model_name}"
        headers = {
            "Authorization": f"Bearer {self.api_token}",
            "Content-Type": "application/json"
        }
        # Deterministic, short completions: temperature 0 and a 200-token cap.
        payload = {
            "inputs": prompt,
            "parameters": {
                "max_new_tokens": 200,
                "temperature": 0.0
            }
        }

        try:
            response = requests.post(url, headers=headers, json=payload, timeout=30)
            response.raise_for_status()
            output = response.json()
            # The API returns a list of generation dicts; guard against an empty list.
            if isinstance(output, list) and output and "generated_text" in output[0]:
                return output[0]["generated_text"].strip()[:200]
            return "No response generated."
        except Exception as e:
            return f"Error from Hugging Face API: {e}"

class LiteLLMModel(BaseModel):
    """Posts prompts to a self-hosted endpoint (e.g. a LiteLLM proxy) that
    accepts {"input": ...} and responds with {"output": ...}."""

    def __init__(self, endpoint_url: str):
        self.url = endpoint_url

    def answer(self, prompt: str) -> str:
        try:
            response = requests.post(self.url, json={"input": prompt}, timeout=30)
            response.raise_for_status()
            return response.json().get("output", "No output.")
        except Exception as e:
            return f"LiteLLM error: {e}"

class OpenAIServerModel(BaseModel):
    """Calls the OpenAI Chat Completions endpoint over plain HTTP."""

    def __init__(self, api_key: str, model: str = "gpt-3.5-turbo"):
        self.api_key = api_key
        self.model = model

    def answer(self, prompt: str) -> str:
        try:
            response = requests.post(
                "https://api.openai.com/v1/chat/completions",
                headers={
                    "Authorization": f"Bearer {self.api_key}",
                    "Content-Type": "application/json"
                },
                json={
                    "model": self.model,
                    "messages": [{"role": "user", "content": prompt}],
                    "max_tokens": 200,
                    "temperature": 0.0
                },
                timeout=30
            )
            response.raise_for_status()
            data = response.json()
            return data["choices"][0]["message"]["content"].strip()[:200]
        except Exception as e:
            return f"OpenAI error: {e}"

class DuckDuckGoAgent:
    """Answers questions via the DuckDuckGo Instant Answer API, falling back
    to HTML scraping and, as a last resort, to the configured LLM backend."""

    def __init__(self):
        print("DuckDuckGoAgent initialized.")
        self.headers = {"User-Agent": "Mozilla/5.0"}
        # Backend selection is driven entirely by environment variables.
        self.hf_api_key = os.getenv("HF_API_TOKEN")
        self.model_type = os.getenv("MODEL_TYPE", "huggingface")
        self.model_name = os.getenv("MODEL_NAME", "mistralai/Mistral-7B-Instruct-v0.1")
        self.model_url = os.getenv("MODEL_URL")  # Required only for the LiteLLM backend
        self.openai_key = os.getenv("OPENAI_API_KEY")

        self.llm = self._init_model()

    def _init_model(self) -> BaseModel:
        # Choose the backend named by MODEL_TYPE, provided its credentials are set.
        if self.model_type == "openai" and self.openai_key:
            return OpenAIServerModel(api_key=self.openai_key)
        elif self.model_type == "litellm" and self.model_url:
            return LiteLLMModel(endpoint_url=self.model_url)
        elif self.model_type == "huggingface" and self.hf_api_key:
            return HfApiModel(model_name=self.model_name, api_token=self.hf_api_key)
        else:
            raise ValueError("No valid model configuration found.")

    def get_duckduckgo_answer(self, query: str) -> str:
        # First attempt: the Instant Answer API, which returns a short abstract
        # for many well-known topics.
        search_query = urllib.parse.quote(query)
        url = f"https://api.duckduckgo.com/?q={search_query}&format=json&no_html=1&skip_disambig=1"

        try:
            response = requests.get(url, timeout=10)
            if response.status_code == 200:
                data = response.json()
                if data.get('AbstractText'):
                    return data['AbstractText'][:200]
                return self.scrape_duckduckgo(query)
            return self.scrape_duckduckgo(query)
        except Exception as e:
            print(f"Error with DuckDuckGo API: {e}")
            return self.scrape_duckduckgo(query)

    def scrape_duckduckgo(self, query: str) -> str:
        # Second attempt: scrape the HTML results page for the first non-empty snippet.
        print("Using fallback: scraping HTML results.")
        try:
            response = requests.post(
                "https://html.duckduckgo.com/html/",
                data={"q": query},
                headers=self.headers,
                timeout=10
            )
            response.raise_for_status()
            soup = BeautifulSoup(response.text, "html.parser")
            snippets = soup.select(".result__snippet")
            for s in snippets:
                text = s.get_text().strip()
                if text:
                    return text[:200]
            # No usable snippet found: fall back to the LLM backend.
            return self.llm.answer(query)
        except Exception as e:
            print(f"Scraping error: {e}")
            return self.llm.answer(query)

    def __call__(self, question: str) -> str:
        # The agent is callable so it can be used like a plain function.
        print(f"Agent received question: {question[:50]}...")
        answer = self.get_duckduckgo_answer(question)
        print(f"Agent returning answer: {answer}")
        return answer
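
# --- Illustrative usage sketch (not part of the original module) ---
# Assumes HF_API_TOKEN is set in the environment so the default Hugging Face
# backend can be constructed; the example question is made up.
if __name__ == "__main__":
    agent = DuckDuckGoAgent()
    print(agent("What is the capital of France?"))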