Someman committed on
Commit 15f1cce · 1 Parent(s): 953fde8

Update app.py


Fixed batch_decode bug (typo: batch_decodee → batch_decode)

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -19,7 +19,7 @@ def generate_output(prompt, input, kwargs):
     text = prompt + input
     inputs = tokenizer(text, return_tensors="pt")
     generate = model.generate(**inputs, **kwargs)
-    output = tokenizer.batch_decodee(
+    output = tokenizer.batch_decode(
         generate[:, inputs.input_ids.shape[1] :], skip_special_tokens=True
     )
     return output[0].split("\n\n")[0].strip()
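The line being fixed slices the generated ids past the prompt length before decoding, so only the new continuation is returned. Below is a minimal standalone sketch of that pattern, not part of the commit: the model name "gpt2" and the example prompt are assumptions for illustration, and the Space's actual model, prompt format, and generation kwargs may differ.

from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "gpt2"  # hypothetical model choice, only for illustration
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

text = "Instruction: say hi\n\nResponse:"
inputs = tokenizer(text, return_tensors="pt")
generate = model.generate(**inputs, max_new_tokens=20)

# batch_decode returns one string per sequence in the batch; slicing off the
# first inputs.input_ids.shape[1] tokens drops the echoed prompt, so only the
# newly generated text is decoded (this is the call the commit corrects from
# the misspelled batch_decodee).
output = tokenizer.batch_decode(
    generate[:, inputs.input_ids.shape[1] :], skip_special_tokens=True
)
print(output[0].split("\n\n")[0].strip())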