import gradio as gr
import requests
from smolagents import CodeAgent
import json
import re
import logging
from tenacity import retry, stop_after_attempt, wait_exponential
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Correct URL based on your Space
HF_SPACE_URL = "https://manavraj-troubleshoot-mcp.hf.space"
@retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=4, max=10), reraise=True)
def call_mcp_server(message, tool_type="knowledge_base"):
    """Call MCP server with proper endpoint routing"""
    try:
        # Map tool types to their respective API endpoints
        endpoint_map = {
            "knowledge_base": "api/knowledge_base",
            "web_search": "api/web_search",
            "formatter": "api/formatter"
        }
        endpoint = endpoint_map.get(tool_type, "api/knowledge_base")
        url = f"{HF_SPACE_URL}/{endpoint}"
        logger.info(f"Calling MCP endpoint: {url}")
        response = requests.post(
            url,
            json={"data": [message]},
            verify=False,  # Temporary for debugging; re-enable TLS verification in production
            timeout=30
        )
        if response.status_code != 200:
            raise Exception(f"MCP server returned {response.status_code}")
        data = response.json()
        if not isinstance(data, dict) or 'data' not in data:
            raise Exception("Invalid MCP response format")
        return data['data'][0]
    except Exception as e:
        logger.error(f"MCP call failed: {str(e)}")
        # Re-raise so tenacity can actually retry; once retries are exhausted the
        # exception propagates to the caller, which reports the error to the user.
        raise
def extract_thought_action_observation(response):
    """Extract TAO (Thought/Action/Observation) cycle from response"""
    sections = {
        'thought': '',
        'action': '',
        'observation': ''
    }
    patterns = {
        'thought': r'(?:THOUGHT|Thought):\s*(.*?)(?=(?:ACTION|Action|OBSERVATION|Observation|$))',
        'action': r'(?:ACTION|Action):\s*(.*?)(?=(?:OBSERVATION|Observation|FINAL|Final|$))',
        'observation': r'(?:OBSERVATION|Observation):\s*(.*?)(?=(?:FINAL|Final|$))'
    }
    for section, pattern in patterns.items():
        match = re.search(pattern, response, re.DOTALL | re.IGNORECASE)
        if match:
            sections[section] = match.group(1).strip()
    return sections
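# Minimal parsing sketch (hypothetical agent output, for illustration only):
#   extract_thought_action_observation(
#       "THOUGHT: check the router\nACTION: query the knowledge base\nOBSERVATION: known issue"
#   )
#   -> {'thought': 'check the router', 'action': 'query the knowledge base',
#       'observation': 'known issue'}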
# Initialize CodeAgent
# Note: depending on the smolagents version, `model` may need to be a model object
# (e.g. an InferenceClient-backed model) rather than a bare model-id string.
agent = CodeAgent(
    tools=[],
    model="microsoft/DialoGPT-medium",
    system_prompt="""[Previous system prompt remains unchanged]"""
)
def determine_tool_type(message):
    """Determine which tool to use based on keywords in the message"""
    message_lower = message.lower()
    tech_keywords = ["wifi", "screen", "computer", "error"]
    search_keywords = ["search", "find", "news", "how to"]
    format_keywords = ["format", "organize", "steps"]
    if any(k in message_lower for k in tech_keywords):
        return "knowledge_base"
    elif any(k in message_lower for k in search_keywords):
        return "web_search"
    elif any(k in message_lower for k in format_keywords):
        return "formatter"
    return "knowledge_base"
def chat_interface(message, history):
    """Enhanced chat interface with proper error handling"""
    try:
        # Get initial thought
        thinking_prompt = f"User Query: {message}\n\nTHOUGHT: Analyze this query"
        agent_response = agent.run(thinking_prompt)
        if not isinstance(agent_response, str):
            agent_response = str(agent_response)
        cycle_parts = extract_thought_action_observation(agent_response)
        tool_type = determine_tool_type(message)
        # Call MCP server (coerce to str so the slicing and len() below are safe)
        mcp_response = str(call_mcp_server(message, tool_type))
        # Generate final response
        final_prompt = f"""
User Query: {message}
THOUGHT: {cycle_parts.get('thought', 'Analysis complete')}
ACTION: Used {tool_type} tool
OBSERVATION: {mcp_response}
FINAL RESPONSE: Provide a complete solution
"""
        final_response = agent.run(final_prompt)
        if not isinstance(final_response, str):
            final_response = str(final_response)
        return f"""πŸ€” **THOUGHT:** {cycle_parts.get('thought', '')}
⚑ **ACTION:** Used {tool_type.replace('_', ' ')}
πŸ‘οΈ **OBSERVATION:** {mcp_response[:200]}{'...' if len(mcp_response) > 200 else ''}
βœ… **SOLUTION:**\n{final_response}"""
    except Exception as e:
        logger.error(f"Chat error: {str(e)}")
        return f"Error processing request: {str(e)}"
# Gradio interface
demo = gr.ChatInterface(
    fn=chat_interface,
    title="πŸ”§ Technical Support Agent",
    examples=[
        "My wifi keeps disconnecting",
        "Search for latest tech news",
        "Format these steps: Restart. Check cables. Test"
    ]
)

if __name__ == "__main__":
    demo.launch()