from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import requests
import pytz
import yaml
from transformers import pipeline
from PIL import Image
from io import BytesIO

from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI

# Load the text-generation pipeline that backs the company_description tool.
llm = pipeline("text-generation", model="Qwen/Qwen2.5-Coder-32B-Instruct")

@tool
def company_logos(company_name: str) -> Image.Image:
    """A tool that retrieves the logo of a given company using the Clearbit Logo API.
    Args:
        company_name: The name of the company.
    Returns:
        A PIL image of the company's logo, or an error message string if retrieval fails.
    """
    # Convert the company name into a domain-friendly format, e.g. "Hugging Face" -> "huggingface.com"
    company_domain = company_name.lower().replace(" ", "") + ".com"
    # Clearbit Logo API endpoint
    logo_url = f"https://logo.clearbit.com/{company_domain}?token=sk_VoU5zjclT8Ot9RoyAbAh9g"
    try:
        # Request the logo image
        response = requests.get(logo_url, timeout=10)
        if response.status_code == 200:
            # Convert the response content to a PIL image
            return Image.open(BytesIO(response.content))
        return f"Could not find a logo for {company_name}."
    except Exception as e:
        return f"Error fetching logo for {company_name}: {str(e)}"

@tool
def company_description(company_name: str) -> str:
    """Generates a short company description using an LLM.
    Args:
        company_name: The name of the company.
    Returns:
        A short description of the company.
    """
    prompt = (
        f"Provide a short summary about {company_name}. Include:\n"
        "- One sentence describing the company's main product or service.\n"
        "- Two sentences summarizing the company's history.\n"
        "- One sentence about the latest news related to the company."
    )
    try:
        # Cap the generated continuation (max_new_tokens) rather than prompt + output combined.
        response = llm(prompt, max_new_tokens=150, do_sample=True)
        return response[0]["generated_text"]
    except Exception as e:
        return f"Error generating description for {company_name}: {str(e)}"

final_answer = FinalAnswerTool()

# If the agent does not answer, the model is overloaded; use another model or the following
# Hugging Face endpoint, which also serves Qwen2.5 Coder:
# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
model = HfApiModel(
    max_tokens=2096,
    temperature=0.5,
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',  # this model may be overloaded at times
    custom_role_conversions=None,
)

# Import tool from Hub
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

agent = CodeAgent(
    model=model,
    tools=[final_answer, company_logos, company_description],  # add your tools here (don't remove final_answer)
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name=None,
    description=None,
    prompt_templates=prompt_templates,
)
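
# Hedged usage sketch: instead of the Gradio UI, the agent could be driven programmatically;
# the task string below is an illustrative example, not part of the original app.
#   agent.run("Fetch the logo of Nvidia and write a short description of the company.")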

GradioUI(agent).launch()