ciyidogan committed
Commit 01114e6 · verified
1 Parent(s): de57a3a

Update fine_tune_inference_test.py

Files changed (1):
  1. fine_tune_inference_test.py +17 -12
fine_tune_inference_test.py CHANGED
@@ -94,18 +94,23 @@ def chat(msg: Message):
     log(f"🔢 Tokenizer input_ids: {inputs['input_ids']}")
     log(f"📏 input shape: {inputs['input_ids'].shape}")
 
-    with torch.no_grad():
-        output = model.generate(
-            **inputs,
-            max_new_tokens=200,
-            do_sample=True,
-            temperature=0.7,
-            top_k=50,
-            top_p=0.95,
-            return_dict_in_generate=True,
-            output_scores=True,
-            suppress_tokens=[tokenizer.pad_token_id] if tokenizer.pad_token_id is not None else None
-        )
+    try:
+        with torch.no_grad():
+            output = model.generate(
+                **inputs,
+                max_new_tokens=200,
+                do_sample=True,
+                temperature=0.7,
+                top_k=50,
+                top_p=0.95,
+                return_dict_in_generate=True,
+                output_scores=True,
+                suppress_tokens=[tokenizer.pad_token_id] if tokenizer.pad_token_id is not None else None
+            )
+    except Exception as e:
+        log(f"⚠️ Error during generate(): {e}")
+        fallback = random.choice(FALLBACK_ANSWERS)
+        return {"answer": fallback, "chat_history": chat_history}
 
     generated_ids = output.sequences[0]
     generated_text = tokenizer.decode(generated_ids, skip_special_tokens=True)
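For readers of the patch: the new except branch relies on names defined elsewhere in fine_tune_inference_test.py (random, FALLBACK_ANSWERS, log, chat_history). The sketch below is not part of the commit; it only shows one plausible shape for those prerequisites so the fallback path can be read in isolation.

    import random

    # Assumed definitions (hypothetical, not from this commit): a pool of
    # canned replies and a simple logging helper used by the fallback path.
    FALLBACK_ANSWERS = [
        "Sorry, I could not generate an answer right now.",
        "Something went wrong, please try again in a moment.",
    ]

    chat_history = []  # hypothetical conversation state returned with every answer

    def log(message: str) -> None:
        # Minimal stand-in; the real script may write to a file or a UI console.
        print(message, flush=True)

With these in place, a failure inside generate() (for example an out-of-memory error or an invalid suppress_tokens value) still produces a well-formed {"answer": ..., "chat_history": ...} response instead of raising to the caller.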