Facelook committed
Commit 2c89664 · 1 Parent(s): b918222

Trial and error.

Files changed (1): app.py (+7, -1)
app.py CHANGED
@@ -46,7 +46,9 @@ class BasicAgent:
             {"role": "system", "content": "You are Qwen, created by Alibaba Cloud. You are a helpful assistant."},
             {"role": "user", "content": question}
         ]
-
+
+        print(f"Messages prepared for model: {messages}")
+
         # Generate response
         if self.model and self.tokenizer:
             # Local model generation
@@ -57,6 +59,8 @@ class BasicAgent:
             )
             model_inputs = self.tokenizer([text], return_tensors="pt").to(self.model.device)
 
+            print(f"Model inputs prepared: {model_inputs}")
+
             generated_ids = self.model.generate(
                 **model_inputs,
                 max_new_tokens=512
@@ -65,6 +69,8 @@ class BasicAgent:
                 output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
             ]
 
+            print(f"Generated IDs: {generated_ids}")
+
             answer = self.tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
         else:
             # Fallback to Inference API
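
For context, the three debug prints land inside the agent's local-generation path. Below is a minimal sketch of how that path fits together, assuming the standard transformers chat-template flow for Qwen models; the __init__/__call__ method names, the model ID, and the fallback branch are not shown in this diff and are placeholders.

# Hypothetical reconstruction of the patched generation path; only the lines
# visible in the diff hunks are confirmed, the surrounding structure is assumed.
from transformers import AutoModelForCausalLM, AutoTokenizer

class BasicAgent:
    def __init__(self, model_id="Qwen/Qwen2.5-7B-Instruct"):  # model ID is a guess
        self.tokenizer = AutoTokenizer.from_pretrained(model_id)
        self.model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

    def __call__(self, question: str) -> str:
        messages = [
            {"role": "system", "content": "You are Qwen, created by Alibaba Cloud. You are a helpful assistant."},
            {"role": "user", "content": question}
        ]

        print(f"Messages prepared for model: {messages}")  # added in this commit

        # Generate response
        if self.model and self.tokenizer:
            # Local model generation: render the chat template, tokenize,
            # generate, then strip the prompt tokens before decoding.
            text = self.tokenizer.apply_chat_template(
                messages, tokenize=False, add_generation_prompt=True
            )
            model_inputs = self.tokenizer([text], return_tensors="pt").to(self.model.device)

            print(f"Model inputs prepared: {model_inputs}")  # added in this commit

            generated_ids = self.model.generate(
                **model_inputs,
                max_new_tokens=512
            )
            generated_ids = [
                output_ids[len(input_ids):] for input_ids, output_ids in zip(model_inputs.input_ids, generated_ids)
            ]

            print(f"Generated IDs: {generated_ids}")  # added in this commit

            answer = self.tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0]
        else:
            # Fallback to Inference API (implementation not shown in this diff)
            answer = ""
        return answer

The prints surface in the Space's container logs, which is presumably the point of this "trial and error" commit: checking what the messages, tokenized inputs, and raw generated token IDs look like at each step.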