Update app.py
app.py CHANGED
@@ -24,14 +24,10 @@ CUDA_AVAILABLE = torch.cuda.is_available()
 device = "cuda" if CUDA_AVAILABLE else "cpu"
 logger.info(f"Using hardware: {device}")
 
-#
-MODEL_CACHE_DIR = os.path.join(os.path.dirname(__file__), "model_cache")
-os.makedirs(MODEL_CACHE_DIR, exist_ok=True)
-
-# Load a single model instance with caching
+# Load a single model instance
 try:
     start_time = time.time()
-    model = KModel("hexgrad/Kokoro-82M"
+    model = KModel("hexgrad/Kokoro-82M").to(device).eval()
     logger.info(f"Model loading time: {time.time() - start_time} seconds")
 except Exception as e:
     logger.error(f"Failed to load model: {e}")
@@ -89,6 +85,9 @@ for label, voice_path in CHOICES.items():
 
 def generate_first(text, voice="af_bella.pt", speed=1, use_gpu=CUDA_AVAILABLE):
     start_time = time.time()
+    if len(text) > 510:
+        text = text[:510]
+        gr.Warning("Text truncated to 510 characters for performance.")
     voice_path = os.path.join(VOICE_DIR, voice)
     if not os.path.exists(voice_path):
         raise FileNotFoundError(f"Voice file not found: {voice_path}")
@@ -116,6 +115,9 @@ def generate_first(text, voice="af_bella.pt", speed=1, use_gpu=CUDA_AVAILABLE):
         return None, ""
 
 def tokenize_first(text, voice="af_bella.pt"):
+    if len(text) > 510:
+        text = text[:510]
+        gr.Warning("Text truncated to 510 characters for performance.")
     voice_path = os.path.join(VOICE_DIR, voice)
     if not os.path.exists(voice_path):
         raise FileNotFoundError(f"Voice file not found: {voice_path}")
@@ -128,6 +130,9 @@ def tokenize_first(text, voice="af_bella.pt"):
 
 def generate_all(text, voice="af_bella.pt", speed=1, use_gpu=CUDA_AVAILABLE):
     start_time = time.time()
+    if len(text) > 510:
+        text = text[:510]
+        gr.Warning("Text truncated to 510 characters for performance.")
     voice_path = os.path.join(VOICE_DIR, voice)
     if not os.path.exists(voice_path):
         raise FileNotFoundError(f"Voice file not found: {voice_path}")
@@ -158,7 +163,7 @@ TOKEN_NOTE = '''
 with gr.Blocks(theme="soft") as app:
     with gr.Row():
         with gr.Column():
-            text = gr.Textbox(label="Input Text", value=TEXT, info="Arbitrarily many characters supported")
+            text = gr.Textbox(label="Input Text", value=TEXT, info="Arbitrarily many characters supported (max 510)")
             with gr.Row():
                 voice = gr.Dropdown(list(CHOICES.items()), value="af_bella.pt" if "af_bella.pt" in CHOICES.values() else list(CHOICES.values())[0], label="Voice",
                                     info="Quality and availability vary by language")