from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
import datetime
import pytz
import yaml
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI

# Define tool for fetching current time in a timezone
@tool
def get_current_time_in_timezone(timezone: str) -> str:
    """Fetches the current local time in a specified timezone."""
    try:
        tz = pytz.timezone(timezone)
        local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
        return f"The current local time in {timezone} is: {local_time}"
    except Exception as e:
        return f"Error fetching time for timezone '{timezone}': {str(e)}"

# Define tool for AI research paper search
@tool
def fetch_ai_research_papers(query: str) -> str:
    """Fetches recent AI research papers based on the query."""
    search_tool = DuckDuckGoSearchTool()
    results = search_tool.search(f"AI research paper {query}")
    return f"Top AI research papers on {query}: {results}"

# Define tool for AI news updates
@tool
def fetch_ai_news() -> str:
    """Fetches the latest AI news updates."""
    search_tool = DuckDuckGoSearchTool()
    results = search_tool("latest AI news")  # call the tool directly
    return f"Latest AI news updates: {results}"

# Load final answer tool
final_answer = FinalAnswerTool()

# Define the model for human-like responses
model = HfApiModel(
    max_tokens=2096,
    temperature=0.7,  # Increased temperature for more human-like responses
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    custom_role_conversions=None,
)

# Load text-to-image generation tool
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Load prompt templates
with open("prompts.yaml", 'r') as stream:
    prompt_templates = yaml.safe_load(stream)

# Create the AI Assistant Agent
agent = CodeAgent(
    model=model,
    tools=[
        final_answer,
        DuckDuckGoSearchTool(),
        get_current_time_in_timezone,
        fetch_ai_research_papers,
        fetch_ai_news,
        image_generation_tool
    ],
    max_steps=6,
    verbosity_level=1,
    grammar=None,
    planning_interval=None,
    name="AI Research Assistant",
    description="An AI assistant that answers queries, fetches AI research papers and news, generates images, and interacts naturally.",
    prompt_templates=prompt_templates
)

# Launch UI
GradioUI(agent).launch()
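
# Alternatively, the agent can be invoked programmatically instead of through the
# Gradio UI; the prompt string below is illustrative only.
# result = agent.run("What is the current local time in Asia/Tokyo?")
# print(result)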