# NOTE: removed Hugging Face Hub file-viewer artifacts (username, commit line,
# "raw / history / blame / 5 kB") that were accidentally captured with the source.
"""
HuggingFace and Gradio Agent Template
Requirements:
pip install -r requirements.txt
"""
import os
from smolagents import CodeAgent, HfApiModel, load_tool, tool
import datetime
import pytz
import yaml
import gradio as gr
from tools.final_answer import FinalAnswerTool
from Gradio_UI import GradioUI
from typing import Dict, Any
from huggingface_hub import InferenceClient
# Example requirements.txt content (save this separately)
# Written verbatim to requirements.txt by ensure_files() when that file is absent.
REQUIREMENTS = """
gradio>=4.0.0
huggingface-hub>=0.19.0
smolagents
pytz
pyyaml
"""
# Basic working tool example
@tool
def calculator(operation: str) -> str:
    """A simple calculator tool that safely evaluates basic math expressions.

    Args:
        operation: A math expression built only from digits, spaces, and the
            characters + - * / . ( ), e.g. "23 * 45".

    Returns:
        str: "Result: <value>" on success, otherwise an error message.
    """
    try:
        # Whitelist characters before eval so the expression can never
        # reference names, attributes, or quotes.
        allowed_chars = set("0123456789+-*/ .()")
        if not all(c in allowed_chars for c in operation):
            return "Error: Only basic math operations allowed"
        # Empty __builtins__ blocks access to functions/imports inside eval.
        # NOTE(review): "**" is expressible with the allowed chars, so inputs
        # like "9**9**9" can still be CPU/memory heavy — consider rejecting "**".
        result = eval(operation, {"__builtins__": {}})
        return f"Result: {result}"
    except Exception as e:
        return f"Error calculating {operation}: {str(e)}"
@tool
def get_time(timezone: str = "UTC") -> str:
    """Get current time in specified timezone.

    Args:
        timezone: An IANA timezone name such as "UTC" or "America/New_York".

    Returns:
        str: Formatted current time, or an error message for unknown zones.
    """
    try:
        # pytz raises UnknownTimeZoneError for bad names; caught below.
        tz = pytz.timezone(timezone)
        current_time = datetime.datetime.now(tz)
        return f"Current time in {timezone}: {current_time.strftime('%Y-%m-%d %H:%M:%S %Z')}"
    except Exception as e:
        return f"Error getting time for {timezone}: {str(e)}"
# Example HuggingFace tool
@tool
def text_generation(prompt: str) -> str:
    """Generate text using HuggingFace model.

    Args:
        prompt: Text prompt for generation

    Returns:
        str: Generated text or error message
    """
    try:
        # Talk to the hosted HF Inference API; no local model is loaded.
        hf_client = InferenceClient()
        # Swap the model id for any text-generation model available on HF.
        generated = hf_client.text_generation(
            prompt,
            model="google/gemma-7b-it",  # Example model
            max_new_tokens=100,
            temperature=0.7,
        )
        return generated
    except Exception as e:
        return f"Error generating text: {str(e)}"
# Create default prompts.yaml
# Written verbatim to prompts.yaml by ensure_files(), then parsed with
# yaml.safe_load() in initialize_agent().
# NOTE(review): the YAML body below appears to have lost its indentation
# (the lines under "system_prompt: |-" must be indented for the literal
# block to parse) — verify against the original template before shipping.
# NOTE(review): the "[... rest of the prompts.yaml content remains the
# same ...]" line is a placeholder, suggesting this template is truncated.
DEFAULT_PROMPTS = """
system_prompt: |-
You are an expert assistant who can solve tasks using Python code and available tools.
You proceed step by step using 'Thought:', 'Code:', and 'Observation:' sequences.
Here's an example:
Task: "Calculate 23 * 45 and generate a short story about the number"
Thought: First, I'll calculate the multiplication.
Code:
```py
result = calculator("23 * 45")
print(result)
```<end_code>
Observation: Result: 1035
Thought: Now I'll generate a short story about this number.
Code:
```py
story = text_generation(f"Write a very short story about the number {1035}")
final_answer(f"The calculation result is 1035.\\nHere's a story about it:\\n{story}")
```<end_code>
You have access to these tools:
- calculator: Evaluates basic math expressions
- get_time: Gets current time in any timezone
- text_generation: Generates text using HuggingFace model
- final_answer: Returns the final answer to the user
Rules:
1. Always use 'Thought:', 'Code:', and end with '<end_code>'
2. Only use defined variables
3. Pass arguments directly to tools
4. Use print() to save intermediate results
5. End with final_answer tool
[... rest of the prompts.yaml content remains the same ...]
"""
def ensure_files() -> None:
    """Create prompts.yaml and requirements.txt in the CWD if they don't exist.

    Writes the module-level DEFAULT_PROMPTS and REQUIREMENTS templates.
    Existing files are never overwritten.
    """
    # Explicit UTF-8 avoids platform-dependent default encodings (fix: the
    # original relied on locale.getpreferredencoding()).
    if not os.path.exists("prompts.yaml"):
        with open("prompts.yaml", "w", encoding="utf-8") as f:
            f.write(DEFAULT_PROMPTS)
    if not os.path.exists("requirements.txt"):
        with open("requirements.txt", "w", encoding="utf-8") as f:
            f.write(REQUIREMENTS)
def initialize_agent() -> CodeAgent:
    """Initialize and return a working CodeAgent."""
    # Guarantee prompts.yaml / requirements.txt exist before reading them.
    ensure_files()

    final_answer = FinalAnswerTool()

    # Hosted inference model backing the agent.
    model = HfApiModel(
        max_tokens=2096,
        temperature=0.5,
        model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
        custom_role_conversions=None,
    )

    # Prompt templates are loaded from the YAML created above.
    with open("prompts.yaml", "r") as f:
        prompt_templates = yaml.safe_load(f)

    registered_tools = [
        final_answer,
        calculator,
        get_time,
        text_generation,
        # Add new tools here
    ]

    return CodeAgent(
        model=model,
        tools=registered_tools,
        max_steps=6,
        verbosity_level=1,
        grammar=None,
        planning_interval=None,
        name=None,
        description=None,
        prompt_templates=prompt_templates,
    )
def main() -> None:
    """Run the agent with Gradio UI."""
    try:
        # Build the agent, wrap it in the chat UI, and start serving.
        GradioUI(initialize_agent()).launch()
    except Exception as e:
        # Surface the failure to the console, then let it propagate so the
        # process exits non-zero.
        print(f"Error starting agent: {str(e)}")
        raise


if __name__ == "__main__":
    main()