Delete app-backup1.py
Browse files- app-backup1.py +0 -236
app-backup1.py
DELETED
@@ -1,236 +0,0 @@
|
|
1 |
-
import spaces
|
2 |
-
import torch
|
3 |
-
import gradio as gr
|
4 |
-
from transformers import pipeline
|
5 |
-
from huggingface_hub import InferenceClient
|
6 |
-
import os
|
7 |
-
import numpy as np
|
8 |
-
from pydub import AudioSegment
|
9 |
-
import tempfile
|
10 |
-
import math
|
11 |
-
|
12 |
-
# --- Configuration constants ---

# Whisper checkpoint used for automatic speech recognition.
MODEL_NAME = "openai/whisper-large-v3-turbo"
# Batch size handed to the ASR pipeline for each chunk.
BATCH_SIZE = 8
# Upper bound (MB) on accepted uploads.
FILE_LIMIT_MB = 1000
# Long audio is split into 10-minute chunks before transcription.
CHUNK_LENGTH = 10 * 60

# transformers accepts a CUDA device index (0) or the string "cpu".
device = 0 if torch.cuda.is_available() else "cpu"
-
# Whisper ASR pipeline; chunk_length_s=30 lets the pipeline itself window
# each (already 10-minute) chunk into 30-second segments internally.
pipe = pipeline(
    task="automatic-speech-recognition",
    model=MODEL_NAME,
    chunk_length_s=30,
    device=device,
)
27 |
-
# Hugging Face ์ถ๋ก ํด๋ผ์ด์ธํธ ์ค์
|
28 |
-
hf_client = InferenceClient(
|
29 |
-
"CohereForAI/c4ai-command-r-plus-08-2024",
|
30 |
-
token=os.getenv("HF_TOKEN")
|
31 |
-
)
|
32 |
-
|
33 |
-
def split_audio(audio_path, chunk_length=CHUNK_LENGTH):
    """Split an audio file into fixed-length WAV chunks on disk.

    Each chunk is exported to its own temporary ``.wav`` file
    (``delete=False``); the caller is responsible for removing those
    files once it is done with them.

    Returns a tuple ``(paths, count)``: the list of temp-file paths and
    the number of chunks produced.
    """
    segment = AudioSegment.from_file(audio_path)
    total_ms = len(segment)           # pydub lengths are in milliseconds
    chunk_ms = chunk_length * 1000
    count = math.ceil((total_ms / 1000) / chunk_length)

    paths = []
    for index in range(count):
        begin = index * chunk_ms
        piece = segment[begin:min(begin + chunk_ms, total_ms)]
        with tempfile.NamedTemporaryFile(suffix='.wav', delete=False) as handle:
            piece.export(handle.name, format='wav')
            paths.append(handle.name)

    return paths, count
-
def process_chunk(chunk_path, task):
    """Transcribe (or translate) one audio chunk, then delete its temp file.

    Parameters
    ----------
    chunk_path : str
        Path to a temporary WAV file produced by ``split_audio``. The file
        is always removed, even when transcription raises.
    task : str
        Whisper task, ``"transcribe"`` or ``"translate"``.

    Returns
    -------
    str
        The recognized text for this chunk.
    """
    try:
        result = pipe(
            chunk_path,
            batch_size=BATCH_SIZE,
            generate_kwargs={"task": task},
            return_timestamps=True
        )
    finally:
        # Fix: previously the temp file leaked whenever pipe() raised;
        # finally guarantees cleanup on both success and failure.
        os.unlink(chunk_path)

    return result["text"]
-
def update_progress(progress):
    """Render a human-readable progress message for the UI."""
    return "์ฒ๋ฆฌ ์ค... {}% ์๋ฃ".format(progress)
73 |
-
@spaces.GPU
def transcribe_summarize(audio_input, task, progress=gr.Progress()):
    """Transcribe (or translate) an audio file, then summarize the text.

    Parameters
    ----------
    audio_input : str | None
        Filepath of the uploaded/recorded audio (``gr.Audio`` with
        ``type="filepath"``); ``None`` when nothing was submitted.
    task : str
        ``"transcribe"`` or ``"translate"``, forwarded to Whisper.
    progress : gr.Progress
        Gradio progress reporter used for UI feedback.

    Returns
    -------
    list[str]
        ``[transcribed_text, summary_text]``; on failure ``["", error_msg]``.

    Raises
    ------
    gr.Error
        When no audio file was submitted.
    """
    if audio_input is None:
        raise gr.Error("์ค๋์ค ํ์ผ์ด ์ ์ถ๋์ง ์์์ต๋๋ค!")

    try:
        # Split the input into 10-minute chunks (temp WAV files on disk).
        chunks, num_chunks = split_audio(audio_input)
        progress(0, desc="์ค๋์ค ํ์ผ ๋ถํ ์๋ฃ")

        # Transcribe each chunk; process_chunk removes its own temp file.
        transcribed_texts = []
        try:
            for i, chunk in enumerate(chunks):
                chunk_text = process_chunk(chunk, task)
                transcribed_texts.append(chunk_text)
                progress((i + 1) / num_chunks, desc=f"์ฒญํฌ {i+1}/{num_chunks} ์ฒ๋ฆฌ ์ค")
        except Exception:
            # Fix: previously any chunks not yet handed to process_chunk
            # leaked on disk when a mid-loop transcription failed.
            for leftover in chunks[len(transcribed_texts):]:
                if os.path.exists(leftover):
                    os.unlink(leftover)
            raise

        # Combine the per-chunk transcripts into one text.
        transcribed_text = " ".join(transcribed_texts)
        progress(0.9, desc="ํ์คํธ ๋ณํ ์๋ฃ")

        # Summarize; only the first 3000 chars are sent to the LLM.
        try:
            prompt = f"""๋ค์ ํ์คํธ๋ฅผ ๊ฐ๋จํ ์์ฝํด์ฃผ์ธ์:
ํ์คํธ: {transcribed_text[:3000]}...
์์ฝ:"""

            response = hf_client.text_generation(
                model="CohereForAI/c4ai-command-r-plus-08-2024",
                prompt=prompt,
                max_new_tokens=250,
                temperature=0.3,
                top_p=0.9,
                repetition_penalty=1.2,
                stop_sequences=["\n", "ํ์คํธ:", "์์ฝ:"]
            )

            summary_text = str(response)
            # Strip everything before the summary marker if the model
            # echoed the prompt scaffold back.
            if "์์ฝ:" in summary_text:
                summary_text = summary_text.split("์์ฝ:")[1].strip()

        except Exception as e:
            # Summarization is best-effort: fall back to a notice rather
            # than failing the whole transcription.
            print(f"์์ฝ ์์ฑ ์ค ์ค๋ฅ ๋ฐ์: {str(e)}")
            summary_text = "์์ฝ์ ์์ฑํ ์ ์์ต๋๋ค."

        progress(1.0, desc="์ฒ๋ฆฌ ์๋ฃ")
        return [transcribed_text, summary_text]

    except Exception as e:
        # Top-level boundary: surface the error in the UI outputs.
        error_msg = f"์์ฑ ์ฒ๋ฆฌ ์ค ์ค๋ฅ๊ฐ ๋ฐ์ํ์ต๋๋ค: {str(e)}"
        return ["", error_msg]
-
# Custom CSS: hide the Gradio footer and style the layout, progress bar,
# and output text boxes.
css = """
footer { visibility: hidden; }
.progress-bar { height: 15px; border-radius: 5px; }
.container { max-width: 1200px; margin: auto; padding: 20px; }
.output-text { font-size: 16px; line-height: 1.5; }
.status-display {
    background: #f0f0f0;
    padding: 10px;
    border-radius: 5px;
    margin: 10px 0;
}
"""
140 |
-
# ํ์ผ ์
๋ก๋ ์ธํฐํ์ด์ค
|
141 |
-
# File-upload tab: transcribe an uploaded audio file and summarize it.
# NOTE(review): labels/placeholders below are mojibake-damaged Korean from
# the recovered source — restore the original Korean text before shipping.
file_transcribe = gr.Interface(
    fn=transcribe_summarize,
    inputs=[
        gr.Audio(
            sources="upload",
            type="filepath",
            label="์ค๋์ค ํ์ผ"  # "audio file"
        ),
        gr.Radio(
            choices=["transcribe", "translate"],
            label="์์ ์ ํ",  # "task selection"
            value="transcribe"
        )
    ],
    outputs=[
        gr.Textbox(
            label="๋ณํ๋ ํ์คํธ",  # "transcribed text"
            lines=10,
            max_lines=30,
            placeholder="์์ฑ์ด ํ์คํธ๋ก ๋ณํ๋์ด ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค...",
            elem_classes="output-text"
        ),
        gr.Textbox(
            label="์์ฝ",  # "summary"
            lines=5,
            placeholder="ํ์คํธ ์์ฝ์ด ์ฌ๊ธฐ์ ํ์๋ฉ๋๋ค...",
            elem_classes="output-text"
        )
    ],
    title="๐ค ์์ฑ ๋ณํ AI",  # "speech conversion AI"
    description="""
    ๊ธด ์์ฑ ํ์ผ(1์๊ฐ ์ด์)๋ ์ฒ๋ฆฌํ ์ ์์ต๋๋ค.
    ์ต๋ 3๊ฐ์ ํ์ผ์ ๋์์ ์ฒ๋ฆฌํ ์ ์์ต๋๋ค.
    ์ฒ๋ฆฌ ์๊ฐ์ ํ์ผ ๊ธธ์ด์ ๋น๋กํ์ฌ ์ฆ๊ฐํฉ๋๋ค.
    """,
    article="""
    ์ฌ์ฉ ๋ฐฉ๋ฒ:
    1. ์ค๋์ค ํ์ผ์ ์๋ก๋ํ๊ฑฐ๋ ๋ง์ดํฌ๋ก ๋น์ํ์ธ์
    2. ์์ ์ ํ์ ์ ํํ์ธ์ (๋ณํ ๋๋ ๋ฒ์ญ)
    3. ๋ณํ ์์ ๋ฒํผ์ ํด๋ฆญํ์ธ์
    4. ์งํ ์ํฉ์ ํ์ธํ์ธ์
    """,
    examples=[],
    cache_examples=False,
    flagging_mode="never"
)
-
# ๋ง์ดํฌ ๋
น์ ์ธํฐํ์ด์ค
|
189 |
-
# Microphone tab: record audio from the mic, transcribe and summarize.
# NOTE(review): labels below are mojibake-damaged Korean from the recovered
# source — restore the original Korean text before shipping.
mic_transcribe = gr.Interface(
    fn=transcribe_summarize,
    inputs=[
        gr.Audio(
            sources="microphone",
            type="filepath",
            label="๋ง์ดํฌ ๋น์"  # "microphone recording"
        ),
        gr.Radio(
            choices=["transcribe", "translate"],
            label="์์ ์ ํ",  # "task selection"
            value="transcribe"
        )
    ],
    outputs=[
        gr.Textbox(
            label="๋ณํ๋ ํ์คํธ",  # "transcribed text"
            lines=10,
            max_lines=30,
            elem_classes="output-text"
        ),
        gr.Textbox(
            label="์์ฝ",  # "summary"
            lines=5,
            elem_classes="output-text"
        )
    ],
    title="๐ค ์์ฑ ๋ณํ AI",
    description="๋ง์ดํฌ๋ก ์์ฑ์ ๋น์ํ์ฌ ํ์คํธ๋ก ๋ณํํ๊ณ ์์ฝํ ์ ์์ต๋๋ค.",
    flagging_mode="never"
)
220 |
-
|
221 |
-
# ๋ฉ์ธ ์ ํ๋ฆฌ์ผ์ด์
|
222 |
-
# Main application: wrap both interfaces in a tabbed layout sharing the
# soft theme and the custom CSS defined above.
demo = gr.Blocks(theme="gradio/soft", css=css)
with demo:
    gr.TabbedInterface(
        [file_transcribe, mic_transcribe],
        ["์ค๋์ค ํ์ผ", "๋ง์ดํฌ ๋น์"]  # tab labels: audio file / mic recording
    )
229 |
-
# ์ ํ๋ฆฌ์ผ์ด์
์คํ
|
230 |
-
# Launch with a request queue; max_threads=3 caps concurrent jobs at 3.
demo.queue().launch(
    server_name="0.0.0.0",  # listen on all interfaces (container deployment)
    share=False,
    debug=True,
    ssr_mode=False,
    max_threads=3
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|