Update app.py
app.py (CHANGED)
@@ -283,7 +283,7 @@ PROMPTS:
     inputs = tokenizer(prompt, return_tensors="pt", truncation=True)
     outputs = model.generate(
         **inputs,
-        max_new_tokens=min(
+        max_new_tokens=min(max_tokens, 512),
         temperature=0.3,
         do_sample=False,
         pad_token_id=tokenizer.eos_token_id
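For context, here is a minimal sketch of how the updated generate() call might sit inside app.py. It assumes the surrounding code supplies model, tokenizer, prompt, and max_tokens; the checkpoint name and the generate_answer helper below are placeholders for illustration, not taken from the Space.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "gpt2"  # placeholder; the Space's actual checkpoint is not visible in the diff
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

def generate_answer(prompt: str, max_tokens: int = 256) -> str:
    # Tokenize the prompt, truncating to the model's maximum input length.
    inputs = tokenizer(prompt, return_tensors="pt", truncation=True)
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=min(max_tokens, 512),  # the change: cap output at 512 new tokens
            temperature=0.3,                      # has no effect while do_sample=False
            do_sample=False,                      # greedy decoding
            pad_token_id=tokenizer.eos_token_id,  # GPT-2-style models define no pad token
        )
    # Strip the prompt tokens so only the newly generated text is returned.
    new_tokens = outputs[0][inputs["input_ids"].shape[1]:]
    return tokenizer.decode(new_tokens, skip_special_tokens=True)

Capping max_new_tokens with min(max_tokens, 512) keeps a caller-supplied limit from requesting arbitrarily long generations, which matters on a Space with restricted CPU/GPU time.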