Fix
agent.py
CHANGED
@@ -296,16 +296,37 @@ def build_graph(provider: str = "huggingface"):
 
     if provider == "huggingface":
         # Use a more capable model from HuggingFace
-
-
-
-
-
-
-
-
-
-
+        try:
+            # Try with a well-supported model first
+            endpoint = HuggingFaceEndpoint(
+                repo_id="google/flan-t5-base",  # This model works well with the current setup
+                temperature=0.1,
+                huggingfacehub_api_token=hf_token,
+                max_new_tokens=512,
+                task="text2text-generation"
+            )
+            llm = ChatHuggingFace(llm=endpoint)
+        except Exception as e:
+            print(f"Failed to initialize google/flan-t5-base: {e}")
+            # Fallback to another model
+            try:
+                endpoint = HuggingFaceEndpoint(
+                    repo_id="microsoft/DialoGPT-medium",
+                    temperature=0.1,
+                    huggingfacehub_api_token=hf_token,
+                    max_new_tokens=512
+                )
+                llm = ChatHuggingFace(llm=endpoint)
+            except Exception as e2:
+                print(f"Failed to initialize DialoGPT-medium: {e2}")
+                # Final fallback
+                endpoint = HuggingFaceEndpoint(
+                    repo_id="bigscience/bloom-560m",
+                    temperature=0.1,
+                    huggingfacehub_api_token=hf_token,
+                    max_new_tokens=256
+                )
+                llm = ChatHuggingFace(llm=endpoint)
     else:
         raise ValueError("Only 'huggingface' provider is supported in this version.")
 
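For reference, the same fallback chain can be written as a loop over candidate endpoints instead of nested try/except blocks. The following is a minimal sketch, not part of the commit: it assumes HuggingFaceEndpoint and ChatHuggingFace come from the langchain_huggingface package (the diff does not show agent.py's imports) and that hf_token is already defined, as in the code above; the candidate repo IDs simply mirror those used in the diff.

# Minimal sketch; assumes langchain_huggingface is installed and hf_token is a valid API token.
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint

def init_chat_model_with_fallbacks(hf_token: str) -> ChatHuggingFace:
    # Candidate endpoints, tried in order until one initializes successfully.
    candidates = [
        dict(repo_id="google/flan-t5-base", max_new_tokens=512, task="text2text-generation"),
        dict(repo_id="microsoft/DialoGPT-medium", max_new_tokens=512),
        dict(repo_id="bigscience/bloom-560m", max_new_tokens=256),
    ]
    last_error = None
    for params in candidates:
        try:
            endpoint = HuggingFaceEndpoint(
                temperature=0.1,
                huggingfacehub_api_token=hf_token,
                **params,
            )
            return ChatHuggingFace(llm=endpoint)
        except Exception as exc:
            # Log the failure and move on to the next candidate, as the diff does.
            print(f"Failed to initialize {params['repo_id']}: {exc}")
            last_error = exc
    raise RuntimeError("No HuggingFace endpoint could be initialized") from last_error

Folding the fallbacks into a loop keeps the retry logic in one place and makes it straightforward to add, remove, or reorder candidate models without adding another nesting level.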