# final-agent-course / agent.py
import os
import requests
import urllib.parse
from bs4 import BeautifulSoup
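
# Abstract base class: every model backend must implement answer(prompt) -> str.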
class BaseModel:
def answer(self, prompt: str) -> str:
raise NotImplementedError("Model must implement the answer method.")
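
# Backend that queries the Hugging Face Inference API for text generation.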
class HfApiModel(BaseModel):
def __init__(self, model_name: str, api_token: str):
self.model_name = model_name
self.api_token = api_token
def answer(self, prompt: str) -> str:
url = f"https://api-inference.huggingface.co/models/{self.model_name}"
headers = {
"Authorization": f"Bearer {self.api_token}",
"Content-Type": "application/json"
}
payload = {
"inputs": prompt,
"parameters": {
"max_new_tokens": 200,
"temperature": 0.0
}
}
try:
response = requests.post(url, headers=headers, json=payload, timeout=30)
response.raise_for_status()
output = response.json()
            # Guard against an empty list or an unexpected payload shape.
            if isinstance(output, list) and output and "generated_text" in output[0]:
                return output[0]["generated_text"].strip()[:200]
            return "No response generated."
except Exception as e:
return f"Error from Hugging Face API: {e}"
class LiteLLMModel(BaseModel):
def __init__(self, endpoint_url: str):
self.url = endpoint_url
def answer(self, prompt: str) -> str:
try:
response = requests.post(self.url, json={"input": prompt}, timeout=30)
response.raise_for_status()
return response.json().get("output", "No output.")
except Exception as e:
return f"LiteLLM error: {e}"
class OpenAIServerModel(BaseModel):
def __init__(self, api_key: str, model: str = "gpt-3.5-turbo"):
self.api_key = api_key
self.model = model
def answer(self, prompt: str) -> str:
try:
response = requests.post(
"https://api.openai.com/v1/chat/completions",
headers={
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json"
},
json={
"model": self.model,
"messages": [{"role": "user", "content": prompt}],
"max_tokens": 200,
"temperature": 0.0
},
timeout=30
)
response.raise_for_status()
data = response.json()
return data["choices"][0]["message"]["content"].strip()[:200]
except Exception as e:
return f"OpenAI error: {e}"
class DuckDuckGoAgent:
def __init__(self):
print("DuckDuckGoAgent initialized.")
self.headers = {"User-Agent": "Mozilla/5.0"}
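        # Backend selection and credentials are read from environment variables.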
self.hf_api_key = os.getenv("HF_API_TOKEN")
self.model_type = os.getenv("MODEL_TYPE", "huggingface")
self.model_name = os.getenv("MODEL_NAME", "mistralai/Mistral-7B-Instruct-v0.1")
self.model_url = os.getenv("MODEL_URL") # For LiteLLM
self.openai_key = os.getenv("OPENAI_API_KEY")
self.llm = self._init_model()
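
    # Choose the LLM backend according to MODEL_TYPE and the credentials that are set.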
def _init_model(self) -> BaseModel:
if self.model_type == "openai" and self.openai_key:
return OpenAIServerModel(api_key=self.openai_key)
elif self.model_type == "litellm" and self.model_url:
return LiteLLMModel(endpoint_url=self.model_url)
elif self.model_type == "huggingface" and self.hf_api_key:
return HfApiModel(model_name=self.model_name, api_token=self.hf_api_key)
else:
raise ValueError("No valid model configuration found.")
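
    # Query the DuckDuckGo Instant Answer API; fall back to HTML scraping when it
    # returns nothing useful.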
    def get_duckduckgo_answer(self, query: str) -> str:
        search_query = urllib.parse.quote(query)
        url = f"https://api.duckduckgo.com/?q={search_query}&format=json&no_html=1&skip_disambig=1"
        try:
            response = requests.get(url, timeout=10)
            if response.status_code == 200:
                data = response.json()
                if data.get("AbstractText"):
                    return data["AbstractText"][:200]
            return self.scrape_duckduckgo(query)
        except Exception as e:
            print(f"Error with DuckDuckGo API: {e}")
            return self.scrape_duckduckgo(query)
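
    # Fallback: scrape the first result snippet from the DuckDuckGo HTML endpoint;
    # if that also yields nothing, ask the configured LLM directly.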
def scrape_duckduckgo(self, query: str) -> str:
print("Using fallback: scraping HTML results.")
try:
response = requests.post(
"https://html.duckduckgo.com/html/",
data={"q": query},
headers=self.headers,
timeout=10
)
soup = BeautifulSoup(response.text, "html.parser")
snippets = soup.select(".result__snippet")
for s in snippets:
text = s.get_text().strip()
if text:
return text[:200]
return self.llm.answer(query)
except Exception as e:
print(f"Scraping error: {e}")
return self.llm.answer(query)
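
    # Entry point: calling the agent with a question returns a short answer string.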
def __call__(self, question: str) -> str:
print(f"Agent received question: {question[:50]}...")
answer = self.get_duckduckgo_answer(question)
print(f"Agent returning answer: {answer}")
return answer
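

# Minimal usage sketch (not part of the original module): it assumes HF_API_TOKEN is
# set in the environment and MODEL_TYPE is left at its "huggingface" default; the
# question below is purely illustrative.
if __name__ == "__main__":
    agent = DuckDuckGoAgent()
    print(agent("What is the capital of France?"))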