Spaces:
Sleeping
Sleeping
File size: 1,833 Bytes
f3e7e87 3556e20 f3e7e87 3556e20 f3e7e87 3556e20 f3e7e87 3556e20 f3e7e87 3556e20 f3e7e87 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 |
from transformers import MarianMTModel, MarianTokenizer
import gradio as gr
# Load models and tokenizers for both translation directions.
en_to_ur_model_name = "Helsinki-NLP/opus-mt-en-ur"
ur_to_en_model_name = "Helsinki-NLP/opus-mt-ur-en"


def _load_marian_pair(checkpoint):
    """Return the (tokenizer, model) pair for a MarianMT checkpoint."""
    return (
        MarianTokenizer.from_pretrained(checkpoint),
        MarianMTModel.from_pretrained(checkpoint),
    )


en_to_ur_tokenizer, en_to_ur_model = _load_marian_pair(en_to_ur_model_name)
ur_to_en_tokenizer, ur_to_en_model = _load_marian_pair(ur_to_en_model_name)
# Translation functions
def translate_en_to_ur(text):
    """Translate English *text* to Urdu with the opus-mt-en-ur model.

    Returns a prompt string instead of translating when the input is
    empty or whitespace-only.
    """
    if not text.strip():
        return "Please enter some English text."
    encoded = en_to_ur_tokenizer(text, return_tensors="pt", padding=True)
    output_ids = en_to_ur_model.generate(**encoded)
    # Single input sequence, so decode only the first generated sequence.
    return en_to_ur_tokenizer.decode(output_ids[0], skip_special_tokens=True)
def translate_ur_to_en(text):
    """Translate Urdu *text* to English with the opus-mt-ur-en model.

    Returns an Urdu prompt string instead of translating when the input
    is empty or whitespace-only.
    """
    if not text.strip():
        return "براہ کرم کچھ اردو متن درج کریں۔"
    encoded = ur_to_en_tokenizer(text, return_tensors="pt", padding=True)
    output_ids = ur_to_en_model.generate(**encoded)
    # Single input sequence, so decode only the first generated sequence.
    return ur_to_en_tokenizer.decode(output_ids[0], skip_special_tokens=True)
# Gradio UI: one tab per translation direction, wired to the functions above.
def _build_tab(title, input_label, output_label, translate_fn):
    """Add a tab containing an input box, output box, and translate button."""
    with gr.Tab(title):
        source_box = gr.Textbox(label=input_label)
        target_box = gr.Textbox(label=output_label)
        go_button = gr.Button("Translate")
        go_button.click(fn=translate_fn, inputs=source_box, outputs=target_box)


with gr.Blocks() as demo:
    gr.Markdown("## English ↔ Urdu Translator")
    _build_tab(
        "English to Urdu",
        "Enter English Text",
        "Urdu Translation",
        translate_en_to_ur,
    )
    _build_tab(
        "Urdu to English",
        "اردو متن درج کریں",
        "English Translation",
        translate_ur_to_en,
    )

demo.launch()
|