import gradio as gr
from transformers import pipeline

# English-to-Spanish translation pipeline
pipe = pipeline("translation", model="Helsinki-NLP/opus-mt-en-es")

def predict(text):
    # Return only the translated string from the pipeline output
    return pipe(text)[0]["translation_text"]

demo = gr.Interface(
    fn=predict,
    inputs='text',
    outputs='text',
)

demo.launch()
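
# The alternative smolagents-based "Alfred" agent app below is kept disabled
# inside a string literal, so only the translation demo above actually runs.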
"""
import gradio as gr
import random
from smolagents import GradioUI, CodeAgent, HfApiModel
# Import our custom tools from their modules
from tools import DuckDuckGoSearchTool, WeatherInfoTool, HubStatsTool
from retriever import load_guest_dataset
# Initialize the Hugging Face model
model = HfApiModel()
# Initialize the web search tool
search_tool = DuckDuckGoSearchTool()
# Initialize the weather tool
weather_info_tool = WeatherInfoTool()
# Initialize the Hub stats tool
hub_stats_tool = HubStatsTool()
# Load the guest dataset and initialize the guest info tool
guest_info_tool = load_guest_dataset()
# Create Alfred with all the tools
alfred = CodeAgent(
    tools=[guest_info_tool, weather_info_tool, hub_stats_tool, search_tool],
    model=model,
    add_base_tools=True,  # Add any additional base tools
    planning_interval=3   # Enable planning every 3 steps
)
if __name__ == "__main__":
    # GradioUI(alfred).launch()
    # Example query Alfred might receive during the gala
    response = alfred.run("What is Facebook and what's their most popular model?")
    print("🎩 Alfred's Response:")
    print(response)
"""