largo committed on
Commit 54d5176 · 1 Parent(s): fe01baa

Working the deploy

Files changed (1)
  1. app.py +20 -123
app.py CHANGED
@@ -1,134 +1,31 @@
- #!/usr/bin/env python3
- """
- SmolAgents Authentication Fix
- Resolves 401 "Invalid username or password" errors
- """
-
  import os
- from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, InferenceClientModel
-
- # Method 1: Explicit token (most reliable)
- def create_agent_with_token():
-     """Create agent with explicit HF token - most reliable method"""
-     # Replace with your actual HuggingFace token
-     hf_token = "hf_xxxxxxxxxxxxxxxxxxxxxxxxxx"
-
-     model = HfApiModel(
-         model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
-         token=hf_token
-     )
-
-     agent = CodeAgent(
-         tools=[DuckDuckGoSearchTool()],
-         model=model
-     )
-     return agent
-
- # Method 2: Environment variable (recommended for production)
- def create_agent_with_env_var():
-     """Create agent using HF_TOKEN environment variable"""
-     # Set environment variable first:
-     # export HF_TOKEN="hf_xxxxxxxxxxxxxxxxxxxxxxxxxx"
-
-     # Verify token is set
-     if not os.getenv("HF_TOKEN"):
-         raise ValueError("HF_TOKEN environment variable not set!")
-
-     model = HfApiModel(
-         model_id="Qwen/Qwen2.5-Coder-32B-Instruct"
-         # token will be read from HF_TOKEN automatically
-     )

-     agent = CodeAgent(
-         tools=[DuckDuckGoSearchTool()],
-         model=model
-     )
-     return agent

- # Method 3: InferenceClientModel (newest approach, 2025)
- def create_agent_with_inference_client():
-     """Create agent using newer InferenceClientModel - better error handling"""
-     hf_token = "hf_xxxxxxxxxxxxxxxxxxxxxxxxxx"

-     model = InferenceClientModel(
-         model_id="Qwen/Qwen2.5-Coder-32B-Instruct",
-         token=hf_token
      )

-     agent = CodeAgent(
-         tools=[DuckDuckGoSearchTool()],
-         model=model
-     )
-     return agent

- # Method 4: Login first (alternative approach)
- def create_agent_with_login():
-     """Create agent after logging in to HuggingFace Hub"""
-     from huggingface_hub import login

-     # Login to HF (will prompt for token if not provided)
-     login(token="hf_xxxxxxxxxxxxxxxxxxxxxxxxxx")

-     # Now HfApiModel should work without explicit token
-     model = HfApiModel(
-         model_id="Qwen/Qwen2.5-Coder-32B-Instruct"
      )

-     agent = CodeAgent(
-         tools=[DuckDuckGoSearchTool()],
-         model=model
-     )
-     return agent
-
- def test_agent(agent):
-     """Test the agent with a simple query"""
-     try:
-         result = agent.run("What is the current time?")
-         print("✅ Success! Agent is working properly.")
-         print(f"Result: {result}")
-         return True
-     except Exception as e:
-         print(f"❌ Error: {e}")
-         return False
-
- if __name__ == "__main__":
-     print("Testing SmolAgents authentication fixes...\n")
-
-     # Try Method 1 first (most common solution)
-     print("Method 1: Explicit token")
-     try:
-         agent = create_agent_with_token()
-         if test_agent(agent):
-             print("✅ Method 1 successful!\n")
-         else:
-             print("❌ Method 1 failed, trying next method...\n")
-     except Exception as e:
-         print(f"❌ Method 1 failed: {e}\n")
-
-     # Try Method 3 (newest approach)
-     print("Method 3: InferenceClientModel")
-     try:
-         agent = create_agent_with_inference_client()
-         if test_agent(agent):
-             print("✅ Method 3 successful!\n")
-     except Exception as e:
-         print(f"❌ Method 3 failed: {e}\n")
-
- # Token Requirements Checklist:
- """
- Your HuggingFace token must have these permissions:
- ✅ "Make calls to the serverless Inference API"
- ✅ "Read access to contents of all public gated repos" (for gated models)
-
- To get your token:
- 1. Go to: https://huggingface.co/settings/tokens
- 2. Click "New token"
- 3. Select "Write" permissions
- 4. Copy the token (starts with hf_)
-
- Common issues:
- - Token is expired or revoked
- - Token lacks proper permissions
- - Model is gated and requires special access
- - Network/firewall blocking HF API calls
- """
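The removed file's closing checklist points at expired, revoked, or under-scoped tokens as the usual cause of the 401 errors. A quick way to confirm a token before debugging further, shown here as a minimal sketch that is not part of this commit, is to ask the Hub who the token belongs to with huggingface_hub.whoami():

# Token sanity check (illustrative only, not part of app.py)
# whoami() raises if the token is invalid, expired, or revoked.
import os
from huggingface_hub import whoami

try:
    info = whoami(token=os.getenv("HF_TOKEN"))
    print(f"Token OK, authenticated as: {info['name']}")
except Exception as e:
    print(f"Token check failed: {e}")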
+ import gradio as gr
  import os

+ from smolagents import InferenceClientModel, CodeAgent, MCPClient


+ try:
+     mcp_client = MCPClient(
+         {"url": "https://abidlabs-mcp-tool-http.hf.space/gradio_api/mcp/sse"}
      )
+     # mcp_client = MCPClient(
+     #     {"url": " http://127.0.0.1:7860/gradio_api/mcp/sse"}
+     # )

+     tools = mcp_client.get_tools()

+     model = InferenceClientModel(token=os.getenv("HF_TOKEN"))

+     agent = CodeAgent(tools=[*tools], model=model, additional_authorized_imports=["json", "ast", "urllib", "base64"])

+     demo = gr.ChatInterface(
+         fn=lambda message, history: str(agent.run(message)),
+         type="messages",
+         examples=["Analyze the sentiment of the following text 'This is awesome'"],
+         title="Agent with MCP Tools",
+         description="This is a simple agent that uses MCP tools to answer questions.",
      )

+     demo.launch(share=True)
+ finally:
+     mcp_client.disconnect()
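For reference, the /gradio_api/mcp/sse path in the URL above is the SSE endpoint Gradio exposes when an app is launched as an MCP server. The sketch below shows the kind of server the agent could be pointed at; it is an illustrative stand-in, not the code of the abidlabs Space, and it assumes a Gradio version with MCP support plus the textblob package for the sentiment example.

# server_app.py - hypothetical MCP tool server (sketch, not part of this commit)
import gradio as gr
from textblob import TextBlob

def sentiment_analysis(text: str) -> str:
    """Return polarity and subjectivity scores for the given text."""
    blob = TextBlob(text)
    return f"polarity={blob.sentiment.polarity:.2f}, subjectivity={blob.sentiment.subjectivity:.2f}"

demo = gr.Interface(
    fn=sentiment_analysis,
    inputs=gr.Textbox(label="Text to analyze"),
    outputs=gr.Textbox(label="Sentiment"),
    title="Sentiment Analysis MCP Tool",
)

# mcp_server=True exposes sentiment_analysis as an MCP tool at /gradio_api/mcp/sse
demo.launch(mcp_server=True)

Against a server like this, mcp_client.get_tools() in the new app.py surfaces sentiment_analysis as a callable tool, which the CodeAgent can invoke when given the example prompt.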