Tesneem committed (verified)
Commit d7880e6 · 1 Parent(s): 9502853

Update app.py

Files changed (1):
  1. app.py +16 -11
app.py CHANGED
@@ -70,19 +70,24 @@ def format_docs(docs: List[Document]) -> str:
 
 # =================== Generate Response from Hugging Face Model ===================
 def generate_response(input_dict: Dict[str, Any]) -> str:
-    client = InferenceClient(api_key=HF_TOKEN)
+    client = InferenceClient(api_key=HF_TOKEN.strip())
     prompt = grantbuddy_prompt.format(**input_dict)
 
-    response = client.chat.completions.create(
-        model="Qwen/Qwen2.5-1.5B-Instruct",
-        messages=[
-            {"role": "system", "content": prompt},
-            {"role": "user", "content": input_dict["question"]},
-        ],
-        max_tokens=1000,
-        temperature=0.2,
-    )
-    return response.choices[0].message.content
+    try:
+        response = client.chat.completions.create(
+            model="Qwen/Qwen2.5-1.5B-Instruct",
+            messages=[
+                {"role": "system", "content": prompt},
+                {"role": "user", "content": input_dict["question"]},
+            ],
+            max_tokens=1000,
+            temperature=0.2,
+        )
+        return response.choices[0].message.content
+    except Exception as e:
+        st.error(f"❌ Error from model: {e}")
+        return "⚠️ Failed to generate response. Please check your model, HF token, or request format."
+
 
 # =================== RAG Chain ===================
 def get_rag_chain(retriever):
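
For reference, a minimal sketch of how the updated generate_response might be exercised on its own. The "context" key and the sample values are assumptions for illustration (only "question" appears in the diff; the real keys are whatever grantbuddy_prompt expects), and this snippet is not part of the commit.

    # Hypothetical quick check of generate_response, run inside app.py's module context.
    # Assumes grantbuddy_prompt is formatted with "context" and "question" keys.
    sample_input = {
        "context": "Our nonprofit runs after-school STEM programs for middle schoolers.",
        "question": "What should the needs statement emphasize?",
    }

    answer = generate_response(sample_input)
    print(answer)  # on failure, the except branch returns the warning string instead of raising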