# app.py
import gradio as gr
from multi_inference import multi_query as deepseek_query
from agents.philosopher import PhilosopherAgent
from agents.historian import HistorianAgent
from agents.hacker import HackerAgent
from agents.comedian import ComedianAgent
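# NOTE: the agents/* modules are not shown in this file; the classes imported above
# are assumed to share a small uniform interface, roughly:
#
#     class PhilosopherAgent:
#         def run(self, prompt: str, query_fn) -> str:
#             # wrap the prompt in this agent's persona, then delegate to query_fn
#             return query_fn(f"As a philosopher, answer: {prompt}")
#
# i.e. each agent adds its own persona framing and calls the shared multi_query backend.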
# Initialize agent classes
philosopher = PhilosopherAgent()
historian = HistorianAgent()
hacker = HackerAgent()
comedian = ComedianAgent()
# Chat function: send the prompt to every agent and collect their responses
def chat(prompt):
    responses = {}
    responses["🧙‍♂️ Philosopher"] = philosopher.run(prompt, deepseek_query)
    responses["👨‍🏫 Historian"] = historian.run(prompt, deepseek_query)
    responses["💻 Hacker"] = hacker.run(prompt, deepseek_query)
    responses["🎭 Comedian"] = comedian.run(prompt, deepseek_query)
    return responses
# Gradio UI setup
demo = gr.Interface(
    fn=chat,
    inputs=gr.Textbox(label="Ask a Question"),
    outputs=gr.JSON(label="Agent Responses"),
    title="🧠 Multi-Agent AI Chatroom",
    description="Ask anything. Each AI agent gives a unique answer!"
)
# Launch only when run directly (Hugging Face Spaces executes app.py as a script, so this covers deployment too)
if __name__ == "__main__":
    demo.launch()
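# Usage sketch (an assumption, not part of this Space): once deployed, the endpoint can
# be called programmatically with gradio_client; the Space id below is a placeholder.
#
#     from gradio_client import Client
#     client = Client("<user>/<space>")  # hypothetical Space id
#     answers = client.predict("What is time?", api_name="/predict")
#     print(answers)  # dict of per-agent responses, as returned by chat()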