# Hugging Face Space scrape artifact (status banner): "Spaces: Sleeping"
import os
import datetime
from typing import List, Dict

import gradio as gr
from smolagents import InferenceClientModel, LiteLLMModel, ToolCallingAgent, MCPClient
# Launch timestamp, ISO 8601 with the local timezone offset (e.g. 2024-06-01T12:00:00+02:00).
time = datetime.datetime.now().astimezone().isoformat()

# System prompt for the agent. The {time} placeholder is substituted immediately
# via .format() so the model always sees a concrete timestamp rather than the
# literal text "{time}".
SYSTEM_PROMPT = """You are a helpful Formula 1 assistant and strategist. You have access to various F1 data and tools to help answer questions about races, drivers, teams, and more.
Be concise and accurate in your responses. If you don't know something, use the available tools to find the information.
In addition, you will be asked to act as a live race engineer strategist during a Formula 1 race, making crucial calls during the event.
Current time (ISO 8601): {time}""".format(time=time)
| def agent_chat(message: str, history: list): | |
| # Manually compose messages: system prompt, then history, then current user message | |
| message = f"{SYSTEM_PROMPT}\n{"\n".join([f"{x['role']}: {x['content']}" for x in history])}\nTask: {message}" | |
| return agent.run(message, max_steps=5) | |
| if __name__ == "__main__": | |
| list_tools = False # Set to True to only list tools (used for debugging) | |
| local_model = True # If you have Ollama installed, set this to True | |
| try: | |
| mcp_client = MCPClient( | |
| {"url": "https://agents-mcp-hackathon-f1-mcp-server.hf.space/gradio_api/mcp/sse", "transport": "sse"}) | |
| tools = mcp_client.get_tools() | |
| if list_tools: | |
| print("### MCP tools ### ") | |
| print("\n".join(f"Tool {1+i}: {t.name}: {t.description}" for i,t in enumerate(tools))) | |
| mcp_client.disconnect() | |
| exit(0) | |
| # Define model | |
| if local_model: | |
| model = LiteLLMModel( | |
| model_id="ollama_chat/qwen3:1.7b", | |
| api_base="http://127.0.0.1:11434", # Default ollama server | |
| num_ctx=32768, | |
| ) | |
| else: | |
| model = InferenceClientModel( | |
| model_id="deepseek-ai/DeepSeek-R1", | |
| provider="nebius", | |
| api_key=os.getenv("NEBIUS_API_KEY") | |
| ) | |
| agent = ToolCallingAgent(model=model, tools=[*tools]) | |
| chat_interface = gr.ChatInterface( | |
| fn=agent_chat, | |
| type="messages", | |
| examples=[ | |
| "What are the driver standings for the 2024 Formula 1 season?", | |
| "What is the calendar for the 2024 Formula 1 season?" | |
| ], | |
| title="๐๏ธ Formula 1 Assistant", | |
| description="This is a simple agent that uses MCP tools to answer questions about Formula 1." | |
| ) | |
| chat_interface.launch() | |
| finally: | |
| mcp_client.disconnect() |