# mcp2-backend / app.py
# (Hugging Face Hub page metadata, kept as a comment so the module parses:
#  uploaded by aymnsk — "Update app.py" — commit cb3bc65 (verified) — 1.22 kB;
#  page links: raw / history / blame)
# app.py
from fastapi import FastAPI
import gradio as gr
from multi_inference import multi_query as deepseek_query
from agents.philosopher import PhilosopherAgent
from agents.historian import HistorianAgent
from agents.hacker import HackerAgent
from agents.comedian import ComedianAgent
# Initialize agents
# One module-level instance per persona; each is later queried with the
# shared deepseek_query backend inside chat().
philosopher = PhilosopherAgent()
historian = HistorianAgent()
hacker = HackerAgent()
comedian = ComedianAgent()
# Initialize FastAPI
# This instance is reassigned below by gr.mount_gradio_app, which mounts
# the Gradio UI onto it and returns the resulting ASGI app.
app = FastAPI()
# Chat logic
def chat(prompt):
    """Fan a single prompt out to every persona agent.

    Parameters
    ----------
    prompt : str
        The user's question, as entered in the Gradio textbox.

    Returns
    -------
    dict
        Maps each emoji-labelled agent name to whatever that agent's
        ``run(prompt, deepseek_query)`` returns.
    """
    # Label -> agent table replaces four copy-pasted assignment statements;
    # adding an agent is now a one-line change. Keys are reproduced exactly
    # as in the original (they are user-visible in the JSON output).
    labelled_agents = {
        "πŸ§™β€β™‚οΈ Philosopher": philosopher,
        "πŸ‘¨β€πŸ« Historian": historian,
        "πŸ’» Hacker": hacker,
        "🎭 Comedian": comedian,
    }
    # Agents are queried sequentially, each with the shared deepseek backend.
    return {
        label: agent.run(prompt, deepseek_query)
        for label, agent in labelled_agents.items()
    }
# Gradio Interface
# Single textbox in; chat()'s dict of agent responses rendered as JSON out.
# No examples, flagging, or theming options are configured here.
gradio_app = gr.Interface(
    fn=chat,
    inputs=gr.Textbox(label="Ask a Question"),
    outputs=gr.JSON(label="Responses from Agents"),
    title="Multi-Agent AI Chatroom πŸ€–",
)
# Mount Gradio to FastAPI
# mount_gradio_app serves the Gradio UI at "/" and returns the FastAPI app,
# so routes registered after this line (e.g. /status) remain available.
app = gr.mount_gradio_app(app, gradio_app, path="/")
# Optional FastAPI root endpoint
@app.get("/status")
def read_status():
    """Lightweight health check: reports that the API process is serving."""
    payload = {"status": "running"}
    return payload