"""
Hugging Face Space implementation for Personal Task Manager Agent
This file serves as the entry point for the Hugging Face Space
"""
import gradio as gr
from task_manager_agent import TaskManagerAgent
import json
import os

# Initialize the agent
agent = TaskManagerAgent()

# Try to load existing tasks if available
if os.path.exists("tasks.json"):
    agent.load_state("tasks.json")

def process_message(message, history):
    """Process user message and return agent response"""
    response = agent.process_query(message)
    # Save state after each interaction
    agent.save_state("tasks.json")
    return response

def get_gaia_answer(question):
    """
    Function to process GAIA benchmark questions
    This is the function that will be called by the GAIA API
    """
    # Process the question with our agent
    response = agent.process_query(question)
    # For GAIA benchmark, we need to return just the answer without any formatting
    # Strip any extra formatting that might be in the response
    clean_response = response.strip()
    return clean_response

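# Illustrative usage (hypothetical question, not taken from the benchmark):
# get_gaia_answer("How many high priority tasks do I have?") returns the agent's
# plain-text answer with surrounding whitespace stripped.
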
# Create Gradio interface
with gr.Blocks(title="Personal Task Manager Agent") as demo:
    gr.Markdown("# Personal Task Manager Agent")
    gr.Markdown("""
    This agent helps you manage tasks through natural language commands.

    ## Example commands:
    - Add a task: "Add a new task to buy groceries"
    - Add with details: "Add task to call mom priority:high due:2023-05-20 category:personal"
    - List tasks: "Show me my tasks" or "What do I need to do?"
    - Complete a task: "Mark task 2 as done" or "I completed task 3"
    - Delete a task: "Delete task 1" or "Remove task 4"
    - Filter tasks: "Show high priority tasks" or "List personal tasks"
    - Get help: "Help me" or "What can you do?"
    """)

    chatbot = gr.Chatbot(height=400)
    msg = gr.Textbox(label="Type your command here")
    clear = gr.Button("Clear")

    def user(message, history):
        # Append the user's message to the chat history with a placeholder for the reply
        return "", history + [[message, None]]

    def bot(history):
        # Generate the agent's reply for the latest message and fill in the placeholder
        message = history[-1][0]
        response = process_message(message, history)
        history[-1][1] = response
        return history

    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    clear.click(lambda: None, None, chatbot, queue=False)

    # Add GAIA API endpoint explanation
    gr.Markdown("""
    ## GAIA Benchmark API
    This Space includes an API endpoint for the GAIA benchmark. The API processes questions
    and returns answers in the format expected by the benchmark.
    The endpoint is automatically available when deployed on Hugging Face Spaces.
    """)

# For GAIA API endpoint
def gaia_api(question):
    """API endpoint for GAIA benchmark"""
    answer = get_gaia_answer(question)
    return {"answer": answer}

# Launch the app
if __name__ == "__main__":
    # Set up FastAPI for the GAIA benchmark endpoint
    from fastapi import FastAPI
    import uvicorn
    from pydantic import BaseModel

    app = FastAPI()

    class Question(BaseModel):
        question: str

    @app.post("/api/gaia")
    async def api_gaia(question: Question):
        return gaia_api(question.question)

    # Mount the Gradio UI on the FastAPI app so the chat interface and the
    # /api/gaia route are served by the same server, then run it with uvicorn.
    app = gr.mount_gradio_app(app, demo, path="/")
    uvicorn.run(app, host="0.0.0.0", port=7860)