Update app.py

app.py CHANGED
@@ -5,16 +5,29 @@ import base64
import threading
from huggingface_hub import login, hf_hub_download

+# Setup HuggingFace authentication and download
login(token=os.environ["HF_TOKEN"])
repo_id = os.environ["REPO_ID"]

+# Torch optimizations
import torch
torch.set_num_threads(1)
if torch.cuda.is_available():
    torch.backends.cudnn.benchmark = True

+# Download generation logic and dependencies from private repo
try:
-
+    # Download all required files
+    files_to_download = [
+        "generate.py",
+        "model.py",
+        "hfmod_sft.py",
+        "tokenizer.json",
+        "tokenizer_config.json",
+        "special_tokens_map.json",
+        "vocab.json",
+        "merges.txt"
+    ]

    for filename in files_to_download:
        try:
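Note: this hunk stops at the inner try on line 33 and the next hunk resumes at the except on line 44, so the per-file download body (new lines 34-43) is not shown. A minimal sketch of what that loop body plausibly does, assuming hf_hub_download pulls each file into the working directory with the same token; names and messages are illustrative, not the file's actual code:

# Hedged sketch of the elided download body (new lines 34-43); not the actual code.
# os, repo_id and files_to_download are already defined in app.py; imports repeated for clarity.
import os
from huggingface_hub import hf_hub_download

for filename in files_to_download:
    try:
        hf_hub_download(
            repo_id=repo_id,
            filename=filename,
            token=os.environ["HF_TOKEN"],  # assumption: same token used for login()
            local_dir=".",                 # assumption: files land next to app.py
        )
        print(f"Downloaded {filename}")
    except Exception as e:
        print(f"Could not download {filename}: {e}")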
@@ -31,9 +44,11 @@ try:
except Exception as e:
    print(f"Error downloading files: {e}")

+# Import the generation function
sys.path.append('.')
from generate import chat_interface, init_model

+# Logo and favicon setup
LOGO_PATH = "static/logo.png"
if os.path.isfile(LOGO_PATH):
    with open(LOGO_PATH, "rb") as f:
@@ -45,6 +60,7 @@ else:
    FAVICON_HTML = ''

def init_translation_model():
+    """Initialize the translation model"""
    success = init_model()
    if success:
        print("Model loaded successfully!")
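Note: new lines 55-59 (between this hunk and the previous one) are elided; given the `import base64` context in the first hunk header, they presumably encode the logo bytes and build FAVICON_HTML as a data-URI link tag, with the empty string above as the fallback. A hedged sketch of that branch; the exact markup is an assumption:

# Hedged sketch of the elided favicon branch (new lines 55-59); markup is assumed, not copied.
import base64

with open(LOGO_PATH, "rb") as f:
    logo_b64 = base64.b64encode(f.read()).decode("utf-8")
FAVICON_HTML = f'<link rel="icon" type="image/png" href="data:image/png;base64,{logo_b64}">'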
@@ -52,10 +68,19 @@ def init_translation_model():
        print("Failed to load model")

def translate_text(source_text, source_lang, target_lang, temperature, top_k, repetition_penalty, max_tokens):
-
+    """Handle translation requests"""
+    return chat_interface(
+        source_text, source_lang, target_lang,
+        temperature, top_k, repetition_penalty, max_tokens
+    )

+# Language options
languages = ["English", "Hindi", "Bengali", "Tamil", "Telugu", "Kannada", "Panjabi"]

+# Custom CSS
+css_path = "static/style.css"
+custom_css = open(css_path, encoding="utf-8").read() if os.path.isfile(css_path) else ""
+
theme_lock_css = """
.gradio-container .theme-toggle,
.gradio-container button[aria-label*="theme"],
@@ -73,9 +98,14 @@ body, .gradio-container {
}
"""

-combined_css = theme_lock_css
+combined_css = custom_css + theme_lock_css

-
+# Theme configuration
+locked_theme = gr.themes.Monochrome(
+    primary_hue="blue",
+    secondary_hue="slate",
+    neutral_hue="slate"
+).set(
    background_fill_primary="#0a1628",
    background_fill_secondary="#1f2937",
    block_background_fill="#374151",
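Note: the gr.Blocks(...) call and the header markdown sit in the elided region (new lines 112-162), which is where combined_css and locked_theme are presumably consumed. A minimal sketch of that wiring, assuming Gradio's standard theme/css keyword arguments; the title and markdown text are placeholders:

# Hedged sketch of the elided Blocks setup (new lines 112-162); title and markdown are placeholders.
import gradio as gr

with gr.Blocks(theme=locked_theme, css=combined_css, title="Translator") as demo:
    gr.Markdown("""
    # Translator
    """)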
@@ -133,6 +163,7 @@ with gr.Blocks(
    """)

    with gr.Row(equal_height=False):
+        # Settings panel
        with gr.Column(scale=1.5, elem_classes="settings-panel"):
            gr.Markdown("## Translation Settings")
            with gr.Row():
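Note: the settings panel body (new lines 170-178) is elided, but the handlers wired up later reference source_lang, target_lang, swap_btn, temperature, top_k, repetition_penalty and max_tokens, so this column presumably defines language dropdowns, a swap button and sampling controls. A hedged sketch of such a panel; labels, defaults and ranges are guesses, not the repo's values:

# Hedged sketch of the elided settings panel (new lines 170-178); values are assumptions.
# Assumes this sits inside the settings-panel Column above.
source_lang = gr.Dropdown(choices=languages, value="English", label="Source Language")
target_lang = gr.Dropdown(choices=languages, value="Hindi", label="Target Language")
swap_btn = gr.Button("Swap Languages")
temperature = gr.Slider(0.1, 2.0, value=0.7, label="Temperature")
top_k = gr.Slider(1, 100, value=50, step=1, label="Top-k")
repetition_penalty = gr.Slider(1.0, 2.0, value=1.1, label="Repetition Penalty")
max_tokens = gr.Slider(16, 1024, value=256, step=16, label="Max Tokens")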
@@ -148,11 +179,22 @@ with gr.Blocks(

        with gr.Column(scale=2, elem_classes="translation-card"):
            gr.Markdown("## Translation Interface")
-            source_text = gr.Textbox(
+            source_text = gr.Textbox(
+                label="Enter text to translate",
+                placeholder="Type or paste your text here",
+                lines=6,
+                max_lines=12
+            )
            with gr.Row():
                translate_btn = gr.Button("Translate", variant="primary", size="lg")
                clear_btn = gr.Button("Clear All", variant="secondary", size="lg")
-            translated_text = gr.Textbox(
+            translated_text = gr.Textbox(
+                label="Translation Result",
+                lines=6,
+                max_lines=12,
+                interactive=False,
+                placeholder="Translation will appear here"
+            )

    with gr.Row():
        with gr.Column():
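Note: new lines 201-209 are elided; the `inputs=[source_text, source_lang, target_lang]` context at the top of the next hunk suggests they build a gr.Examples block inside this row. A hedged sketch; the example rows are invented:

# Hedged sketch of the elided block (new lines 201-209), assuming it is gr.Examples; rows are invented.
gr.Examples(
    examples=[
        ["Hello, how are you?", "English", "Hindi"],
        ["Good morning", "English", "Tamil"],
    ],
    inputs=[source_text, source_lang, target_lang],
)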
@@ -168,15 +210,30 @@ with gr.Blocks(
            inputs=[source_text, source_lang, target_lang],
        )

+    # Event handlers
    def swap_languages(src, tgt):
        return tgt, src

    def clear_all():
        return "", ""

-    swap_btn.click(
-
-
+    swap_btn.click(
+        fn=swap_languages,
+        inputs=[source_lang, target_lang],
+        outputs=[source_lang, target_lang]
+    )
+
+    clear_btn.click(
+        fn=clear_all,
+        outputs=[source_text, translated_text]
+    )
+
+    translate_btn.click(
+        fn=translate_text,
+        inputs=[source_text, source_lang, target_lang, temperature, top_k, repetition_penalty, max_tokens],
+        outputs=[translated_text]
+    )
+
    demo.load(fn=init_translation_model)

if __name__ == "__main__":
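Note: the diff ends just inside the __main__ guard, so the launch call itself is not shown. A minimal sketch of how a Space typically launches the Blocks app; the server arguments are assumptions, not the file's actual values:

# Hedged sketch of the elided launch block; server settings are typical Space defaults, not confirmed.
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)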