Tesneem committed on
Commit
4879f09
·
verified ·
1 Parent(s): 647a923

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -2
app.py CHANGED
@@ -205,9 +205,18 @@ def generate_response(input_dict):
205
  **inputs,
206
  max_new_tokens=512,
207
  temperature=0.7,
208
- do_sample=True
 
209
  )
210
- return tokenizer.decode(outputs[0], skip_special_tokens=True).split("QUESTION:")[-1].strip()
 
 
 
 
 
 
 
 
211
 
212
 
213
 
 
205
  **inputs,
206
  max_new_tokens=512,
207
  temperature=0.7,
208
+ do_sample=True,
209
+ pad_token_id=tokenizer.eos_token_id # avoids warning
210
  )
211
+ decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
212
+
213
+ # Print for debugging
214
+ print("🔍 Full LLM Output:\n", decoded)
215
+
216
+
217
+ response_only = decoded[len(prompt):].strip()
218
+ return response_only
219
+
220
 
221
 
222