# English <-> Urdu translator — Gradio app using Helsinki-NLP MarianMT models.
# (Originally a Colab / Hugging Face Spaces notebook; paste artifacts removed.)
# Install dependencies before running.
# In Google Colab / Jupyter, run:  !pip install transformers gradio --quiet
# (The bang-magic is not valid in a plain .py file, so it is kept as a comment.)

# Third-party imports: MarianMT seq2seq translation models + Gradio web UI.
from transformers import MarianMTModel, MarianTokenizer
import gradio as gr

# Helsinki-NLP OPUS-MT checkpoint names, one per translation direction.
en_to_ur_model_name = "Helsinki-NLP/opus-mt-en-ur"
ur_to_en_model_name = "Helsinki-NLP/opus-mt-ur-en"

# Load each tokenizer/model pair once at import time
# (weights are downloaded from the Hugging Face Hub on first run).
en_to_ur_tokenizer = MarianTokenizer.from_pretrained(en_to_ur_model_name)
en_to_ur_model = MarianMTModel.from_pretrained(en_to_ur_model_name)
ur_to_en_tokenizer = MarianTokenizer.from_pretrained(ur_to_en_model_name)
ur_to_en_model = MarianMTModel.from_pretrained(ur_to_en_model_name)
# Translation functions


def translate_en_to_ur(text):
    """Translate English text to Urdu via the en->ur MarianMT model.

    Args:
        text: English source string.

    Returns:
        The Urdu translation as a plain string.
    """
    # Tokenize to PyTorch tensors; truncation keeps input within model limits.
    inputs = en_to_ur_tokenizer(text, return_tensors="pt", padding=True, truncation=True)
    translated = en_to_ur_model.generate(**inputs)
    # generate() returns a batch; decode the single (first) sequence.
    return en_to_ur_tokenizer.decode(translated[0], skip_special_tokens=True)
def translate_ur_to_en(text):
    """Translate Urdu text to English via the ur->en MarianMT model.

    Args:
        text: Urdu source string.

    Returns:
        The English translation as a plain string.
    """
    # Tokenize to PyTorch tensors; truncation keeps input within model limits.
    inputs = ur_to_en_tokenizer(text, return_tensors="pt", padding=True, truncation=True)
    translated = ur_to_en_model.generate(**inputs)
    # generate() returns a batch; decode the single (first) sequence.
    return ur_to_en_tokenizer.decode(translated[0], skip_special_tokens=True)
# Gradio interface: two tabs, one per translation direction.
with gr.Blocks() as demo:
    gr.Markdown("## 🈯 English ↔ Urdu Translator")
    with gr.Tab("English ➜ Urdu"):
        en_input = gr.Textbox(label="Enter English Text")
        en_output = gr.Textbox(label="Translated Urdu Text")
        en_translate_btn = gr.Button("Translate to Urdu")
        # Wire the button: fn, inputs, outputs.
        en_translate_btn.click(translate_en_to_ur, en_input, en_output)
    with gr.Tab("Urdu ➜ English"):
        ur_input = gr.Textbox(label="اردو متن داخل کریں")
        ur_output = gr.Textbox(label="Translated English Text")
        ur_translate_btn = gr.Button("Translate to English")
        ur_translate_btn.click(translate_ur_to_en, ur_input, ur_output)

# Launch the app (for local testing or in Colab).
if __name__ == "__main__":
    demo.launch()