nnmthuw committed
Commit 3631b92
Parent: 3cecaab

Update app.py

Files changed (1)
app.py +2 -4
app.py CHANGED
@@ -1,10 +1,9 @@
-import torch
 from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
 import gradio as gr

 def envit5_translation(text):
     inputs = [f"en: {text}"]
-    outputs = model.generate(tokenizer(inputs, return_tensors="pt", padding=True).input_ids.to(device), max_length=512)
+    outputs = model.generate(tokenizer(inputs, return_tensors="pt", padding=True).input_ids, max_length=512)
     results = tokenizer.batch_decode(outputs, skip_special_tokens=True)
     return results[0][4:]

@@ -22,10 +21,9 @@ def translation(text):
     return (output1, output2, output3)

 if __name__ == "__main__":
-    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
     model_name = "VietAI/envit5-translation"
     tokenizer = AutoTokenizer.from_pretrained(model_name)
-    model = AutoModelForSeq2SeqLM.from_pretrained(model_name).to(device)
+    model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

     inputs = [
         "textbox"