Spaces:
Running
Running
Commit
·
f91c1eb
1
Parent(s):
17bea6b
fix I hope
Browse files
- .gradio/flagged/dataset2.csv +2 -2
- app.py +3 -2
.gradio/flagged/dataset2.csv
CHANGED
@@ -1,2 +1,2 @@
|
|
1 |
-
How shall Codice Da Vinci help today?,Code Style,🧾 Leonardo's Work,timestamp
|
2 |
-
"make a script to compute fibonacci numbers, no comments please",Clean & Pythonic,,2025-04-19 15:21:21.859616
|
|
|
1 |
+
How shall Codice Da Vinci help today?,Code Style,🧾 Leonardo's Work,timestamp
|
2 |
+
"make a script to compute fibonacci numbers, no comments please",Clean & Pythonic,,2025-04-19 15:21:21.859616
|
app.py
CHANGED
@@ -20,11 +20,12 @@ def generate_code(prompt, style="Clean & Pythonic"):
|
|
20 |
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
|
21 |
outputs = model.generate(**inputs,
|
22 |
# max_new_tokens=100,
|
23 |
-
max_new_tokens=
|
24 |
do_sample=True,
|
25 |
temperature=1.0,
|
26 |
top_p=0.95,
|
27 |
-
eos_token_id=tokenizer.eos_token_id
|
|
|
28 |
return tokenizer.decode(outputs[0], skip_special_tokens=True)
|
29 |
|
30 |
demo = gr.Interface(
|
|
|
20 |
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
|
21 |
outputs = model.generate(**inputs,
|
22 |
# max_new_tokens=100,
|
23 |
+
max_new_tokens=500,
|
24 |
do_sample=True,
|
25 |
temperature=1.0,
|
26 |
top_p=0.95,
|
27 |
+
# eos_token_id=tokenizer.eos_token_id
|
28 |
+
)
|
29 |
return tokenizer.decode(outputs[0], skip_special_tokens=True)
|
30 |
|
31 |
demo = gr.Interface(
|