File size: 1,420 Bytes
f33f0bf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
446bd07
a9f37bd
 
446bd07
a9f37bd
 
 
446bd07
a9f37bd
 
446bd07
a9f37bd
 
446bd07
a9f37bd
 
446bd07
a9f37bd
 
446bd07
a9f37bd
 
446bd07
a9f37bd
 
fc96461
a9f37bd
 
 
446bd07
 
 
9cc226c
 
 
 
 
f33f0bf
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
import gradio as gr

from transformers import pipeline

pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es")

def predict(text):
  return pipe(text)[0]["translation_text"]

demo = gr.Interface(
  fn=predict,
  inputs='text',
  outputs='text',
)

demo.launch()

"""


import gradio as gr
import random
from smolagents import GradioUI, CodeAgent, HfApiModel

# Import our custom tools from their modules
from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
from retriever import load_guest_dataset

# Initialize the Hugging Face model
model = HfApiModel()

# Initialize the web search tool
search_tool = DuckDuckGoSearchTool()

# Initialize the weather tool
weather_info_tool = WeatherInfoTool()

# Initialize the Hub stats tool
hub_stats_tool = HubStatsTool()

# Load the guest dataset and initialize the guest info tool
guest_info_tool = load_guest_dataset()

# Create Alfred with all the tools
alfred = CodeAgent(
    tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool], 
    model=model,
    add_base_tools=True,  # Add any additional base tools
    planning_interval=3   # Enable planning every 3 steps
)

if __name__ == "__main__":
    #GradioUI(alfred).launch()
    # Example query Alfred might receive during the gala
    response = alfred.run("What is Facebook and what's their most popular model?")

    print("🎩 Alfred's Response:")
    print(response)

"""