import display_gloss as dg
import synonyms_preprocess as sp
from NLP_Spacy_base_translator import NlpSpacyBaseTranslator
from flask import Flask, render_template, Response, request
import requests

app = Flask(__name__)

# Initialize data
nlp, dict_docs_spacy = sp.load_spacy_values()
dataset, list_2000_tokens = dg.load_data()


def translate_korean_to_english(text):
    """Translate Korean text to English via the unofficial Google Translate endpoint."""
    url = "https://translate.googleapis.com/translate_a/single"
    params = {
        "client": "gtx",
        "sl": "ko",
        "tl": "en",
        "dt": ["t", "bd"],  # "bd" requests additional dictionary data alongside the translation
        "q": text,
    }
    try:
        response = requests.get(url, params=params, timeout=10)
        response.raise_for_status()
        # Join all translated segments so multi-sentence input is not truncated
        segments = response.json()[0]
        translation = "".join(segment[0] for segment in segments if segment[0])

        # Basic post-processing: drop filler openings that hurt gloss conversion
        translation = translation.replace("It is", "").replace("There is", "").strip()
        return translation
    except Exception as e:
        print(f"Translation error: {e}")
        return text


def process_gloss_conversion(english_text):
    """Pre-process English text with custom mappings for common Korean-specific terms."""
    term_mapping = {
        "Korea": "KOREA",
        "Seoul": "SEOUL",
        "four seasons": "FOUR SEASON",
        "beautiful": "BEAUTIFUL",
        "country": "COUNTRY",
    }

    # Apply mappings before ASL conversion (match both the original and lowercase forms)
    for term, replacement in term_mapping.items():
        english_text = english_text.replace(term, replacement)
        english_text = english_text.replace(term.lower(), replacement)
    return english_text


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/translate/', methods=['POST'])
def result():
    input_text = request.form['inputSentence']

    # Check if input contains Hangul syllables (simplified Korean detection)
    is_korean = any(ord('가') <= ord(char) <= ord('힣') for char in input_text)

    if is_korean:
        english_translation = translate_korean_to_english(input_text)
    else:
        english_translation = input_text

    # Pre-process for better ASL conversion
    processed_english = process_gloss_conversion(english_translation)

    # Convert to ASL gloss
    eng_to_asl_translator = NlpSpacyBaseTranslator(sentence=processed_english)
    generated_gloss = eng_to_asl_translator.translate_to_gloss()

    gloss_list_lower = [gloss.lower() for gloss in generated_gloss.split() if gloss.isalnum()]
    gloss_sentence_before_synonym = " ".join(gloss_list_lower)

    # Apply custom synonym rules to map glosses onto the supported token set
    gloss_list = [sp.find_synonyms(gloss, nlp, dict_docs_spacy, list_2000_tokens)
                  for gloss in gloss_list_lower]
    gloss_sentence_after_synonym = " ".join(gloss_list)

    return render_template('result.html',
                           original_sentence=input_text,
                           english_translation=english_translation,
                           gloss_sentence_before_synonym=gloss_sentence_before_synonym,
                           gloss_sentence_after_synonym=gloss_sentence_after_synonym)


@app.route('/video_feed')
def video_feed():
    sentence = request.args.get('gloss_sentence_to_display', '')
    gloss_list = sentence.split()
    return Response(dg.generate_video(gloss_list, dataset, list_2000_tokens),
                    mimetype='multipart/x-mixed-replace; boundary=frame')


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5000, debug=True)