captain-awesome committed on
Commit
695f9e9
·
verified ·
1 Parent(s): bab0dab

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -7
app.py CHANGED
@@ -62,9 +62,11 @@ from transformers import AutoModelForCausalLM
62
  import torch
63
 
64
  try:
65
- mcp_client = MCPClient({"url": "https://captain-awesome-alquranchapters.hf.space/gradio_api/mcp/sse"})
66
- await mcp_client.connect() # or use async context manager if supported
67
-
 
 
68
  tools = mcp_client.get_tools()
69
 
70
  model_id = "unsloth/Llama-3.2-1B"
@@ -77,10 +79,6 @@ try:
77
 
78
  agent = CodeAgent(tools=tools, model=model)
79
 
80
- async def gradio_fn(message, history):
81
- response = await agent.run(message) # assuming run is async
82
- return str(response)
83
-
84
  demo = gr.ChatInterface(
85
  fn=lambda message, history: str(agent.run(message)),
86
  type="messages",
 
62
  import torch
63
 
64
  try:
65
+ mcp_client = MCPClient(
66
+ ## Try this working example on the hub:
67
+ # {"url": "https://abidlabs-mcp-tools.hf.space/gradio_api/mcp/sse"}
68
+ {"url": "http://localhost:7860/gradio_api/mcp/sse"}
69
+ )
70
  tools = mcp_client.get_tools()
71
 
72
  model_id = "unsloth/Llama-3.2-1B"
 
79
 
80
  agent = CodeAgent(tools=tools, model=model)
81
 
 
 
 
 
82
  demo = gr.ChatInterface(
83
  fn=lambda message, history: str(agent.run(message)),
84
  type="messages",