masadonline commited on
Commit
6490802
·
verified ·
1 Parent(s): 0a83fa4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +45 -53
app.py CHANGED
@@ -1,65 +1,57 @@
1
  import streamlit as st
2
- from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, pipeline
3
- from PIL import Image
4
  import requests
 
 
5
 
6
- # Load tokenizer and model for Urdu to English translation
7
- tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-ur-en")
8
- model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-ur-en")
9
 
10
- # Load CLIPVision model for ASL fingerspelling recognition
11
- asl_model = pipeline("image-classification", model="aalof/clipvision-asl-fingerspelling")
 
12
 
13
- # Function to translate Urdu to English
14
- def translate_urdu_to_english(urdu_text):
15
- inputs = tokenizer(urdu_text, return_tensors="pt", padding=True)
16
- outputs = model.generate(**inputs)
17
- translated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
18
- return translated_text
19
 
20
- # Function to classify ASL fingerspelling
21
- def classify_asl_image(image):
22
- result = asl_model(image)
23
- return result[0]['label']
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
24
 
25
  # Streamlit UI
26
- st.title("🤟 Urdu to ASL Fingerspelling Translator")
 
 
27
 
28
- # Urdu Text Input
29
- urdu_text = st.text_area("✍️ Enter Urdu text:")
30
 
31
- if st.button("🔁 Translate & Show ASL"):
32
  if not urdu_text.strip():
33
- st.warning("Please enter some Urdu text first.")
34
  else:
35
- with st.spinner("Translating Urdu to English..."):
36
- translated_text = translate_urdu_to_english(urdu_text.strip())
37
-
38
- st.success("Translated to English:")
39
- st.markdown(f"**🔤 {translated_text}**")
40
-
41
- st.markdown("**🤟 ASL Fingerspelling Representation (A–Z):**")
42
- # Display fingerspelling for each character in translated text
43
- for char in translated_text:
44
- if char.isalpha():
45
- image_url = f"https://raw.githubusercontent.com/madisonlang/sign-language-alphabet/main/images/{char.upper()}.jpg"
46
- try:
47
- img = Image.open(requests.get(image_url, stream=True).raw)
48
- st.image(img, caption=char.upper(), width=80)
49
- except Exception as e:
50
- st.warning(f"Missing image for: {char.upper()}")
51
-
52
- st.markdown("----")
53
- st.subheader("🖐️ Upload Hand Image for ASL Letter Prediction")
54
-
55
- # Image Upload for ASL Prediction
56
- uploaded_file = st.file_uploader("Upload an image of a single ASL fingerspelling gesture", type=["jpg", "jpeg", "png"])
57
-
58
- if uploaded_file:
59
- image = Image.open(uploaded_file)
60
- st.image(image, caption="Uploaded Image", width=300)
61
-
62
- with st.spinner("Classifying ASL gesture..."):
63
- asl_letter = classify_asl_image(image)
64
-
65
- st.success(f"Predicted ASL Letter: **{asl_letter}**")
 
1
  import streamlit as st
 
 
2
  import requests
3
+ import os
4
+ from PIL import Image
5
 
6
+ # Hugging Face model for Urdu to English translation (ensure it supports ur -> en)
7
+ HF_API_URL = "https://api-inference.huggingface.co/models/Helsinki-NLP/opus-mt-ur-en"
8
+ HF_API_TOKEN = os.getenv("HF_API_TOKEN") # Set this in your environment
9
 
10
+ headers = {
11
+ "Authorization": f"Bearer {HF_API_TOKEN}"
12
+ }
13
 
14
+ # Sign images folder (e.g., a.jpg, b.jpg, hello.jpg, etc.)
15
+ SIGN_IMAGE_FOLDER = "sign_images"
 
 
 
 
16
 
17
def translate_urdu_to_english(urdu_text):
    """Translate Urdu text to English via the Hugging Face Inference API.

    Parameters
    ----------
    urdu_text : str
        Urdu source text to translate.

    Returns
    -------
    str
        The English translation, or "" on any failure (an error message
        is shown in the Streamlit UI in that case).
    """
    payload = {"inputs": urdu_text}
    try:
        # Bound the request so a stalled API call cannot hang the UI forever.
        response = requests.post(HF_API_URL, headers=headers, json=payload, timeout=30)
    except requests.exceptions.RequestException:
        # Network failure / timeout: report instead of crashing the app.
        st.error("Translation failed. Please try again.")
        return ""
    if response.status_code == 200:
        data = response.json()
        # A successful response looks like [{"translation_text": "..."}];
        # guard against unexpected shapes (e.g. {"error": "..."} payloads).
        if isinstance(data, list) and data and "translation_text" in data[0]:
            return data[0]["translation_text"]
    st.error("Translation failed. Please try again.")
    return ""
25
+
26
def display_sign_language(text):
    """Render *text* as sign-language images from SIGN_IMAGE_FOLDER.

    Shows a single image per word when a whole-word image (``word.jpg``)
    exists; otherwise falls back to fingerspelling the word letter by
    letter, silently skipping characters that have no matching image.
    """
    for word in text.lower().split():
        whole_word_image = os.path.join(SIGN_IMAGE_FOLDER, f"{word}.jpg")
        if os.path.exists(whole_word_image):
            st.image(whole_word_image, caption=word)
            continue
        # No image for the whole word: spell it out one letter at a time.
        for letter in (c for c in word if c.isalpha()):
            letter_image = os.path.join(SIGN_IMAGE_FOLDER, f"{letter}.jpg")
            if os.path.exists(letter_image):
                st.image(letter_image, caption=letter)
39
 
40
# --- Streamlit user interface ---
st.set_page_config(page_title="DeafTranslator", layout="centered")
st.title("🤟 DeafTranslator")
st.markdown("Translate **Urdu text** to **English** and see it in **sign language**.")

urdu_text = st.text_area("Enter Urdu Text", height=150)

if st.button("Translate to Sign Language"):
    if urdu_text.strip():
        # Translate first; only render output sections when it succeeded
        # (the helper already surfaced an error message on failure).
        with st.spinner("Translating..."):
            english_text = translate_urdu_to_english(urdu_text)
            if english_text:
                st.subheader("Translated English Text")
                st.success(english_text)
                st.subheader("Sign Language Representation")
                display_sign_language(english_text)
    else:
        st.warning("Please enter some Urdu text.")