aymnsk committed on
Commit cb3bc65 · verified · 1 Parent(s): c363697

Update app.py

Files changed (1)
  1. app.py +13 -9
app.py CHANGED
@@ -2,21 +2,23 @@

 from fastapi import FastAPI
 import gradio as gr
-from multi_inference import multi_query as deepseek_query  # ✅ Corrected import
+from multi_inference import multi_query as deepseek_query

 from agents.philosopher import PhilosopherAgent
 from agents.historian import HistorianAgent
 from agents.hacker import HackerAgent
 from agents.comedian import ComedianAgent

-app = FastAPI()
-
+# Initialize agents
 philosopher = PhilosopherAgent()
 historian = HistorianAgent()
 hacker = HackerAgent()
 comedian = ComedianAgent()

+# Initialize FastAPI
+app = FastAPI()

+# Chat logic
 def chat(prompt):
     responses = {}
     responses["🧙‍♂️ Philosopher"] = philosopher.run(prompt, deepseek_query)
@@ -25,16 +27,18 @@ def chat(prompt):
     responses["🎭 Comedian"] = comedian.run(prompt, deepseek_query)
     return responses

-
-interface = gr.Interface(
+# Gradio Interface
+gradio_app = gr.Interface(
     fn=chat,
     inputs=gr.Textbox(label="Ask a Question"),
     outputs=gr.JSON(label="Responses from Agents"),
     title="Multi-Agent AI Chatroom 🤖",
 )

-@app.get("/")
-def read_root():
-    return {"message": "Welcome to the Multi-Agent AI Chatroom!"}
+# Mount Gradio to FastAPI
+app = gr.mount_gradio_app(app, gradio_app, path="/")

-app = gr.mount_gradio_app(app, interface, path="/chat")
+# Optional FastAPI root endpoint
+@app.get("/status")
+def read_status():
+    return {"status": "running"}