LamiaYT committed on
Commit 7343388 · 1 Parent(s): 695f802
Files changed (1): agent.py +31 -10
agent.py CHANGED
@@ -296,16 +296,37 @@ def build_graph(provider: str = "huggingface"):
 
     if provider == "huggingface":
         # Use a more capable model from HuggingFace
-        endpoint = HuggingFaceEndpoint(
-            repo_id="microsoft/DialoGPT-large",  # You can also try "google/flan-t5-xl" or "bigscience/bloom-7b1"
-            temperature=0.1,
-            huggingfacehub_api_token=hf_token,
-            model_kwargs={
-                "max_length": 1024,
-                "return_full_text": False
-            }
-        )
-        llm = ChatHuggingFace(llm=endpoint)
+        try:
+            # Try with a well-supported model first
+            endpoint = HuggingFaceEndpoint(
+                repo_id="google/flan-t5-base",  # This model works well with the current setup
+                temperature=0.1,
+                huggingfacehub_api_token=hf_token,
+                max_new_tokens=512,
+                task="text2text-generation"
+            )
+            llm = ChatHuggingFace(llm=endpoint)
+        except Exception as e:
+            print(f"Failed to initialize google/flan-t5-base: {e}")
+            # Fallback to another model
+            try:
+                endpoint = HuggingFaceEndpoint(
+                    repo_id="microsoft/DialoGPT-medium",
+                    temperature=0.1,
+                    huggingfacehub_api_token=hf_token,
+                    max_new_tokens=512
+                )
+                llm = ChatHuggingFace(llm=endpoint)
+            except Exception as e2:
+                print(f"Failed to initialize DialoGPT-medium: {e2}")
+                # Final fallback
+                endpoint = HuggingFaceEndpoint(
+                    repo_id="bigscience/bloom-560m",
+                    temperature=0.1,
+                    huggingfacehub_api_token=hf_token,
+                    max_new_tokens=256
+                )
+                llm = ChatHuggingFace(llm=endpoint)
     else:
         raise ValueError("Only 'huggingface' provider is supported in this version.")
 
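The committed change expresses the model fallback as two nested try/except blocks. For comparison, below is a minimal loop-based sketch of the same pattern. It is not part of agent.py: the helper name init_llm_with_fallback and the candidate list are illustrative, and it assumes the classes come from the langchain_huggingface package (older code may import them from langchain_community instead) and that hf_token is a valid Hub token.

# Sketch only: loop-based variant of the fallback chain added in this commit.
# Assumes the langchain-huggingface package is installed; helper name and
# candidate list are hypothetical, not taken from agent.py.
from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint


def init_llm_with_fallback(hf_token: str) -> ChatHuggingFace:
    # Candidates are tried in order; per-model kwargs mirror the commit above.
    candidates = [
        {"repo_id": "google/flan-t5-base", "max_new_tokens": 512, "task": "text2text-generation"},
        {"repo_id": "microsoft/DialoGPT-medium", "max_new_tokens": 512},
        {"repo_id": "bigscience/bloom-560m", "max_new_tokens": 256},
    ]
    last_error = None
    for params in candidates:
        try:
            endpoint = HuggingFaceEndpoint(
                temperature=0.1,
                huggingfacehub_api_token=hf_token,
                **params,
            )
            return ChatHuggingFace(llm=endpoint)
        except Exception as e:
            # Log the failure and move on to the next candidate model.
            print(f"Failed to initialize {params['repo_id']}: {e}")
            last_error = e
    raise RuntimeError("No HuggingFace endpoint could be initialized") from last_error

A loop keeps the behaviour of the nested blocks (first working model wins, each failure is printed) while making it a one-line change to add or reorder fallback models.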