import gradio as gr
import requests
from smolagents import CodeAgent
import json
import re
import logging
from tenacity import retry, stop_after_attempt, wait_exponential
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
# Correct URL based on your Space
HF_SPACE_URL = "https://manavraj-troubleshoot-mcp.hf.space"
@retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=4, max=10))
def call_mcp_server(message, tool_type="knowledge_base"):
"""Call MCP server with proper endpoint routing"""
try:
# Map tool types to their respective API endpoints
endpoint_map = {
"knowledge_base": "api/knowledge_base",
"web_search": "api/web_search",
"formatter": "api/formatter"
}
endpoint = endpoint_map.get(tool_type, "api/knowledge_base")
url = f"{HF_SPACE_URL}/{endpoint}"
logger.info(f"Calling MCP endpoint: {url}")
response = requests.post(
url,
json={"data": [message]},
verify=False, # Temporary for debugging
timeout=30
)
if response.status_code != 200:
raise Exception(f"MCP server returned {response.status_code}")
data = response.json()
if not isinstance(data, dict) or 'data' not in data:
raise Exception("Invalid MCP response format")
return data['data'][0]
except Exception as e:
logger.error(f"MCP call failed: {str(e)}")
return f"MCP server error: {str(e)}"
def extract_thought_action_observation(response):
"""Extract TAO cycle from response"""
sections = {
'thought': '',
'action': '',
'observation': ''
}
patterns = {
'thought': r'(?:THOUGHT|Thought):\s*(.*?)(?=(?:ACTION|Action|OBSERVATION|Observation|$))',
'action': r'(?:ACTION|Action):\s*(.*?)(?=(?:OBSERVATION|Observation|FINAL|Final|$))',
'observation': r'(?:OBSERVATION|Observation):\s*(.*?)(?=(?:FINAL|Final|$))'
}
for section, pattern in patterns.items():
match = re.search(pattern, response, re.DOTALL | re.IGNORECASE)
if match:
sections[section] = match.group(1).strip()
return sections
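
# Illustrative parse (input text is made up, not from a real model run):
#   extract_thought_action_observation(
#       "Thought: check the router\nAction: query knowledge base\nObservation: known issue"
#   )
#   -> {'thought': 'check the router',
#       'action': 'query knowledge base',
#       'observation': 'known issue'}
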
# Initialize CodeAgent
agent = CodeAgent(
    tools=[],
    model="microsoft/DialoGPT-medium",
    system_prompt="""[Previous system prompt remains unchanged]"""
)
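
# NOTE: depending on the installed smolagents version, CodeAgent may expect a
# Model object (e.g. InferenceClientModel) rather than a bare model-id string,
# and newer releases configure prompts via prompt templates instead of a
# `system_prompt` keyword; adjust the constructor above to match your version.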
def determine_tool_type(message):
"""Determine which tool to use"""
message_lower = message.lower()
tech_keywords = ["wifi", "screen", "computer", "error"]
search_keywords = ["search", "find", "news", "how to"]
format_keywords = ["format", "organize", "steps"]
if any(k in message_lower for k in tech_keywords):
return "knowledge_base"
elif any(k in message_lower for k in search_keywords):
return "web_search"
elif any(k in message_lower for k in format_keywords):
return "formatter"
return "knowledge_base"
def chat_interface(message, history):
"""Enhanced chat interface with proper error handling"""
try:
# Get initial thought
thinking_prompt = f"User Query: {message}\n\nTHOUGHT: Analyze this query"
agent_response = agent.run(thinking_prompt)
if not isinstance(agent_response, str):
agent_response = str(agent_response)
cycle_parts = extract_thought_action_observation(agent_response)
tool_type = determine_tool_type(message)
# Call MCP server
mcp_response = call_mcp_server(message, tool_type)
# Generate final response
final_prompt = f"""
User Query: {message}
THOUGHT: {cycle_parts.get('thought', 'Analysis complete')}
ACTION: Used {tool_type} tool
OBSERVATION: {mcp_response}
FINAL RESPONSE: Provide a complete solution
"""
final_response = agent.run(final_prompt)
if not isinstance(final_response, str):
final_response = str(final_response)
return f"""π€ **THOUGHT:** {cycle_parts.get('thought', '')}
β‘ **ACTION:** Used {tool_type.replace('_', ' ')}
ποΈ **OBSERVATION:** {mcp_response[:200]}{'...' if len(mcp_response) > 200 else ''}
β
**SOLUTION:**\n{final_response}"""
except Exception as e:
logger.error(f"Chat error: {str(e)}")
return f"Error processing request: {str(e)}"
# Gradio interface
demo = gr.ChatInterface(
    fn=chat_interface,
    title="🔧 Technical Support Agent",
    examples=[
        "My wifi keeps disconnecting",
        "Search for latest tech news",
        "Format these steps: Restart. Check cables. Test"
    ]
)
if __name__ == "__main__":
    demo.launch()