Spaces:
Build error
debugging
app.py
CHANGED
@@ -22,18 +22,19 @@ def chat(message, history):
 
 
 def feedback(text):
-    # tokenized_phrases = grammar_tokenizer([text], return_tensors='pt', padding=True)
-    # corrections = grammar_model.generate(**tokenized_phrases)
-    # corrections = grammar_tokenizer.batch_decode(corrections, skip_special_tokens=True)
-    batch = grammar_tokenizer([text],truncation=True,padding='max_length',max_length=64, return_tensors="pt").to(torch_device)
-    corrections= grammar_model.generate(**batch,max_length=64,num_beams=2, num_return_sequences=num_return_sequences, temperature=1.5)
-    print("The corrections are: ", corrections)
-    if len(corrections) == 0:
-        feedback = f'Looks good! Keep up the good work'
-    else:
-        suggestion = tokenizer.batch_decode(corrections[0], skip_special_tokens=True)
-        feedback = f'\'{suggestion}\' might be a little better'
-    return f'FEEDBACK: {feedback}'
+    # # tokenized_phrases = grammar_tokenizer([text], return_tensors='pt', padding=True)
+    # # corrections = grammar_model.generate(**tokenized_phrases)
+    # # corrections = grammar_tokenizer.batch_decode(corrections, skip_special_tokens=True)
+    # batch = grammar_tokenizer([text],truncation=True,padding='max_length',max_length=64, return_tensors="pt").to(torch_device)
+    # corrections= grammar_model.generate(**batch,max_length=64,num_beams=2, num_return_sequences=num_return_sequences, temperature=1.5)
+    # print("The corrections are: ", corrections)
+    # if len(corrections) == 0:
+    #     feedback = f'Looks good! Keep up the good work'
+    # else:
+    #     suggestion = tokenizer.batch_decode(corrections[0], skip_special_tokens=True)
+    #     feedback = f'\'{suggestion}\' might be a little better'
+    # return f'FEEDBACK: {feedback}'
+    return "Great stuff"
 
 iface = gr.Interface(
     chat,
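
For reference, a minimal sketch of what a working feedback() might look like once the build issue is resolved. Everything below is an assumption, not code from this Space: the checkpoint name is a placeholder for whichever grammar-correction model the app actually loads, num_return_sequences (not defined in the hunk shown) is pinned to 1, the else branch decodes with grammar_tokenizer rather than the undefined tokenizer, and the len(corrections) == 0 test (which generate output never satisfies) is replaced by comparing the suggestion against the input.

import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

torch_device = "cuda" if torch.cuda.is_available() else "cpu"

# Placeholder checkpoint: substitute the grammar-correction model this Space actually loads.
GRAMMAR_MODEL_NAME = "prithivida/grammar_error_correcter_v1"
grammar_tokenizer = AutoTokenizer.from_pretrained(GRAMMAR_MODEL_NAME)
grammar_model = AutoModelForSeq2SeqLM.from_pretrained(GRAMMAR_MODEL_NAME).to(torch_device)


def feedback(text):
    # Tokenize the input sentence and move the tensors to the model's device.
    batch = grammar_tokenizer(
        [text], truncation=True, padding="max_length", max_length=64, return_tensors="pt"
    ).to(torch_device)
    # Beam-search a single corrected candidate.
    corrections = grammar_model.generate(**batch, max_length=64, num_beams=2, num_return_sequences=1)
    # Decode token ids back into text.
    suggestion = grammar_tokenizer.batch_decode(corrections, skip_special_tokens=True)[0]
    # If the model returns the sentence unchanged, there is nothing to correct.
    if suggestion.strip() == text.strip():
        return "FEEDBACK: Looks good! Keep up the good work"
    return f"FEEDBACK: '{suggestion}' might be a little better"

The temperature=1.5 argument from the original call is dropped in this sketch because plain beam search ignores temperature unless sampling is enabled.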