# (removed non-code scrape residue: HuggingFace Spaces page header "Spaces: / Running / Running")
import gradio as gr | |
import pandas as pd | |
import os | |
import time | |
import threading | |
import tempfile | |
import logging | |
import random | |
import uuid | |
import shutil | |
import glob | |
from datetime import datetime | |
from gradio_client import Client | |
# Logging setup: INFO level, streamed to stdout and appended to a local file
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(),
        logging.FileHandler('control_tower_app.log', mode='a')
    ]
)
# Module-level logger used by every function in this file
logger = logging.getLogger(__name__)
# API ν΄λΌμ΄μΈνΈ μ€μ | |
def get_client(): | |
# νκ²½λ³μμμ API μλν¬μΈνΈ μ½κΈ° | |
endpoint = os.environ.get('API_ENDPOINT', '') | |
# νκ²½λ³μμ νμΌ λ΄μ©μ΄λ λΆνμν ν μ€νΈκ° λ€μ΄μ¨ κ²½μ° μ 리 | |
if endpoint: | |
# μ€λ°κΏμΌλ‘ λΆλ¦¬ν΄μ 첫 λ²μ§Έ μ ν¨ν λΌμΈ μ°ΎκΈ° | |
lines = endpoint.split('\n') | |
for line in lines: | |
line = line.strip() | |
# μ£Όμμ΄λ λΉμ΄μλ λΌμΈ μ μΈ | |
if line and not line.startswith('#') and '/' in line: | |
# API_ENDPOINT= κ°μ ν€κ° μ κ±° | |
if '=' in line: | |
line = line.split('=', 1)[1].strip() | |
# λ°μ΄ν μ κ±° | |
line = line.strip('"\'') | |
if line and '/' in line and len(line) < 50: | |
return Client(line) | |
raise ValueError("μ¬λ°λ₯Έ API_ENDPOINTλ₯Ό μ€μ ν΄μ£ΌμΈμ (μ: username/repo-name)") | |
# Per-session bookkeeping, keyed by session id (uuid4 string):
#   session_temp_files: session id -> list of temp file paths created for it
#   session_data: session id -> metadata dict (currently 'last_activity' timestamp)
# NOTE(review): plain dicts shared across daemon threads; accesses are simple
# enough to be safe under the GIL, but confirm if handlers grow more complex.
session_temp_files = {}
session_data = {}
def cleanup_huggingface_temp_folders():
    """Best-effort sweep of stale temp files left by previous runs.

    Scans well-known temp locations for files matching this app's naming
    patterns, deletes those untouched for over an hour, then clears Gradio's
    example cache. Every failure is logged and swallowed — cleanup must never
    prevent the app from starting.
    """
    try:
        candidate_dirs = [
            tempfile.gettempdir(),
            "/tmp",
            "/var/tmp",
            os.path.join(os.getcwd(), "temp"),
            os.path.join(os.getcwd(), "tmp"),
            "/gradio_cached_examples",
            "/flagged"
        ]
        patterns = (
            "session_*.xlsx", "session_*.csv",
            "*keyword*.xlsx", "*keyword*.csv",
            "tmp*.xlsx", "tmp*.csv",
        )
        removed = 0
        for directory in candidate_dirs:
            if not os.path.exists(directory):
                continue
            try:
                matches = []
                for pattern in patterns:
                    matches.extend(glob.glob(os.path.join(directory, pattern)))
                for path in matches:
                    try:
                        # Only delete files older than one hour
                        if os.path.getmtime(path) < time.time() - 3600:
                            os.remove(path)
                            removed += 1
                            logger.info(f"μ΄κΈ° μ 리: μ€λλ μμ νμΌ μμ - {path}")
                    except Exception as e:
                        logger.warning(f"νμΌ μμ μ€ν¨ (무μλ¨): {path} - {e}")
            except Exception as e:
                logger.warning(f"μμ λλ ν 리 μ 리 μ€ν¨ (무μλ¨): {directory} - {e}")
        logger.info(f"β νκΉ νμ΄μ€ μμ ν΄λ μ΄κΈ° μ 리 μλ£ - {removed}κ° νμΌ μμ ")
        try:
            # Gradio's example cache can be removed wholesale
            gradio_temp_dir = os.path.join(os.getcwd(), "gradio_cached_examples")
            if os.path.exists(gradio_temp_dir):
                shutil.rmtree(gradio_temp_dir, ignore_errors=True)
                logger.info("Gradio μΊμ ν΄λ μ 리 μλ£")
        except Exception as e:
            logger.warning(f"Gradio μΊμ ν΄λ μ 리 μ€ν¨ (무μλ¨): {e}")
    except Exception as e:
        logger.error(f"μ΄κΈ° μμ ν΄λ μ 리 μ€ μ€λ₯ (κ³μ μ§ν): {e}")
def setup_clean_temp_environment():
    """Set up a clean, app-specific temp directory.

    Runs the initial temp-folder sweep, recreates <system temp>/keyword_app
    from scratch, and publishes its path through the KEYWORD_APP_TEMP env
    var (read back by get_app_temp_dir). Falls back to the system temp
    directory on any failure.
    """
    try:
        cleanup_huggingface_temp_folders()
        app_temp_dir = os.path.join(tempfile.gettempdir(), "keyword_app")
        # Recreate from scratch so no stale files from a previous run survive
        if os.path.exists(app_temp_dir):
            shutil.rmtree(app_temp_dir, ignore_errors=True)
        os.makedirs(app_temp_dir, exist_ok=True)
        os.environ['KEYWORD_APP_TEMP'] = app_temp_dir
        logger.info(f"β μ ν리μΌμ΄μ μ μ© μμ λλ ν 리 μ€μ : {app_temp_dir}")
        return app_temp_dir
    except Exception as e:
        logger.error(f"μμ νκ²½ μ€μ μ€ν¨: {e}")
        return tempfile.gettempdir()
def get_app_temp_dir():
    """Return the app-specific temp directory, or the system temp dir if unset."""
    fallback = tempfile.gettempdir()
    return os.environ.get('KEYWORD_APP_TEMP', fallback)
def get_session_id():
    """Return a new unique session identifier (uuid4 in string form)."""
    return f"{uuid.uuid4()}"
def cleanup_session_files(session_id, delay=300):
    """Schedule deletion of a session's temp files after *delay* seconds.

    Work happens on a daemon thread so shutdown is never blocked; each file
    is removed best-effort and failures are only logged.
    """
    def _deferred_cleanup():
        time.sleep(delay)
        # Take ownership of the session's file list (if still registered)
        pending = session_temp_files.pop(session_id, None)
        if not pending:
            return
        for file_path in pending:
            try:
                if os.path.exists(file_path):
                    os.remove(file_path)
                    logger.info(f"μΈμ {session_id[:8]}... μμ νμΌ μμ : {file_path}")
            except Exception as e:
                logger.error(f"μΈμ {session_id[:8]}... νμΌ μμ μ€λ₯: {e}")
    threading.Thread(target=_deferred_cleanup, daemon=True).start()
def register_session_file(session_id, file_path):
    """Record *file_path* as owned by *session_id* for later cleanup."""
    session_temp_files.setdefault(session_id, []).append(file_path)
def cleanup_old_sessions():
    """Expire sessions idle for over an hour: delete their files and state."""
    now = time.time()
    expired = [sid for sid, info in session_data.items()
               if now - info.get('last_activity', 0) > 3600]
    for sid in expired:
        # Remove every temp file registered to the session (best effort)
        for file_path in session_temp_files.pop(sid, []):
            try:
                if os.path.exists(file_path):
                    os.remove(file_path)
                    logger.info(f"μ€λλ μΈμ {sid[:8]}... νμΌ μμ : {file_path}")
            except Exception as e:
                logger.error(f"μ€λλ μΈμ νμΌ μμ μ€λ₯: {e}")
        session_data.pop(sid, None)
        logger.info(f"μ€λλ μΈμ λ°μ΄ν° μμ : {sid[:8]}...")
def update_session_activity(session_id):
    """Stamp *session_id* as active now (consulted by cleanup_old_sessions)."""
    session_data.setdefault(session_id, {})['last_activity'] = time.time()
def create_session_temp_file(session_id, suffix='.xlsx'):
    """Create, register, and return the path of an empty per-session temp file.

    The name embeds the session prefix, a timestamp, and a random nonce to
    avoid collisions inside the app-specific temp directory.
    """
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    nonce = random.randint(1000, 9999)
    filename = f"session_{session_id[:8]}_{stamp}_{nonce}{suffix}"
    path = os.path.join(get_app_temp_dir(), filename)
    # Touch the file so later shutil.copy targets an existing path
    open(path, 'w').close()
    register_session_file(session_id, path)
    return path
def wrapper_modified(keyword, korean_only, apply_main_keyword_option, exclude_zero_volume, session_id):
    """Run the keyword search through the remote API and normalize the response.

    Parameters:
        keyword: main keyword typed by the user.
        korean_only: drop non-Korean keywords when True.
        apply_main_keyword_option: radio value controlling keyword combination.
        exclude_zero_volume: drop zero-search-volume keywords when True.
        session_id: id used for activity tracking and temp-file ownership.

    Returns a 9-tuple consumed by process_search_results: table html, two
    dropdown-choices updates, a None placeholder for the state df, the
    analysis-category dropdown update, a local excel path (or None), two
    visibility updates, and the keyword echoed back. On any failure an
    equally-shaped error tuple is returned instead of raising.
    """
    update_session_activity(session_id)
    try:
        client = get_client()
        result = client.predict(
            keyword=keyword,
            korean_only=korean_only,
            apply_main_keyword=apply_main_keyword_option,
            exclude_zero_volume=exclude_zero_volume,
            api_name="/process_search_results"
        )
        # Inspect and unpack the API response (6 values expected)
        logger.info(f"API μλ΅ νμ : {type(result)}, κΈΈμ΄: {len(result) if isinstance(result, (list, tuple)) else 'N/A'}")
        logger.info(f"API μλ΅ λ΄μ©: {result}")
        if isinstance(result, (list, tuple)) and len(result) >= 6:
            table_html, cat_choices, vol_choices, selected_cat, download_file, extra = result[:6]
            logger.info(f"table_html νμ : {type(table_html)}")
            logger.info(f"cat_choices: {cat_choices}")
            logger.info(f"vol_choices: {vol_choices}")
            logger.info(f"selected_cat: {selected_cat}")
            logger.info(f"download_file: {download_file}")
        elif isinstance(result, (list, tuple)) and len(result) >= 5:
            # Shorter response shape: no sixth value supplied
            table_html, cat_choices, vol_choices, selected_cat, download_file = result[:5]
            extra = None
        else:
            # Unexpected response shape: fall back to safe defaults
            logger.warning(f"μμκ³Ό λ€λ₯Έ API μλ΅: {result}")
            table_html = "<p>κ²μ κ²°κ³Όλ₯Ό μ²λ¦¬νλ μ€ μ€λ₯κ° λ°μνμ΅λλ€.</p>"
            cat_choices = ["μ 체 보기"]
            vol_choices = ["μ 체"]
            selected_cat = "μ 체 보기"
            download_file = None
        # table_html may arrive as a gr payload dict: extract its value
        if isinstance(table_html, dict) and 'value' in table_html:
            table_html = table_html['value']
        elif isinstance(table_html, dict):
            table_html = str(table_html)  # last resort: stringify the dict
        # Normalize choices (flatten nested [label, value] pairs to labels)
        if isinstance(cat_choices, dict) and 'choices' in cat_choices:
            cat_choices = [choice[0] if isinstance(choice, list) else choice for choice in cat_choices['choices']]
        elif isinstance(cat_choices, list) and cat_choices and isinstance(cat_choices[0], list):
            cat_choices = [choice[0] for choice in cat_choices]
        if isinstance(vol_choices, dict) and 'choices' in vol_choices:
            vol_choices = [choice[0] if isinstance(choice, list) else choice for choice in vol_choices['choices']]
        elif isinstance(vol_choices, list) and vol_choices and isinstance(vol_choices[0], list):
            vol_choices = [choice[0] for choice in vol_choices]
        # Normalize selected_cat to a plain string
        if isinstance(selected_cat, dict) and 'value' in selected_cat:
            selected_cat = selected_cat['value']
        elif isinstance(selected_cat, list):
            selected_cat = selected_cat[0] if selected_cat else "μ 체 보기"
        logger.info(f"μ²λ¦¬λ cat_choices: {cat_choices}")
        logger.info(f"μ²λ¦¬λ vol_choices: {vol_choices}")
        logger.info(f"μ²λ¦¬λ selected_cat: {selected_cat}")
        # Copy the API's download file into a session-owned temp file so the
        # UI can serve it; a copy failure must not discard the search result.
        local_file = None
        if download_file:
            try:
                local_file = create_session_temp_file(session_id, '.xlsx')
                shutil.copy(download_file, local_file)
                logger.info(f"νμΌ λ³΅μ¬ μλ£: {local_file}")
            except Exception as file_error:
                logger.error(f"νμΌ λ³΅μ¬ μ€λ₯: {file_error}")
        return (
            table_html,
            gr.update(choices=cat_choices),
            gr.update(choices=vol_choices),
            None,
            gr.update(choices=cat_choices, value=selected_cat),  # keep analysis category dropdown in sync
            local_file,
            gr.update(visible=True),
            gr.update(visible=True),
            keyword
        )
    except Exception as e:
        logger.error(f"API νΈμΆ μ€λ₯: {e}")
        import traceback
        logger.error(f"μμΈ μ€λ₯: {traceback.format_exc()}")
        # Error fallback mirroring the shape of the success tuple
        return (
            gr.update(value="<p>κ²μ κ²°κ³Όκ° μμ΅λλ€. λ€λ₯Έ ν€μλλ‘ μλν΄λ³΄μΈμ.</p>"),
            gr.update(choices=["μ 체 보기"]),
            gr.update(choices=["μ 체"]),
            None,
            gr.update(choices=["μ 체 보기"], value="μ 체 보기"),
            None,
            gr.update(visible=False),
            gr.update(visible=False),
            keyword
        )
def analyze_with_auto_download(analysis_keywords, selected_category, state_df, session_id):
    """Run the category-match analysis through the remote API.

    Parameters:
        analysis_keywords: raw keyword text entered by the user.
        selected_category: category chosen for the analysis.
        state_df: client-side table state (accepted for wiring symmetry; the
            API keeps its own server-side state).
        session_id: id used for activity tracking and temp-file ownership.

    Returns:
        (analysis_html, local_download_path_or_None, visibility update for
        the result section). On failure returns an error message instead.
    """
    update_session_activity(session_id)
    try:
        client = get_client()
        result = client.predict(
            analysis_keywords=analysis_keywords,
            selected_category=selected_category,
            api_name="/process_analyze_results"
        )
        logger.info(f"λΆμ API μλ΅ νμ : {type(result)}, κΈΈμ΄: {len(result) if isinstance(result, (list, tuple)) else 'N/A'}")
        if isinstance(result, (list, tuple)) and len(result) >= 2:
            analysis_result, download_file = result[:2]
        elif isinstance(result, str):
            # Some responses are just the html string with no file
            analysis_result = result
            download_file = None
        else:
            logger.warning(f"μμκ³Ό λ€λ₯Έ λΆμ API μλ΅: {result}")
            analysis_result = "λΆμ κ²°κ³Όλ₯Ό μ²λ¦¬νλ μ€ μ€λ₯κ° λ°μνμ΅λλ€."
            download_file = None
        local_file = None
        if download_file:
            # Copy into a session-owned temp file. Guarded like in
            # wrapper_modified: a copy failure must not bubble to the outer
            # except and discard the already-computed analysis result.
            try:
                local_file = create_session_temp_file(session_id, '.xlsx')
                shutil.copy(download_file, local_file)
            except Exception as file_error:
                logger.error(f"νμΌ λ³΅μ¬ μ€λ₯: {file_error}")
                local_file = None
        return analysis_result, local_file, gr.update(visible=True)
    except Exception as e:
        logger.error(f"λΆμ API νΈμΆ μ€λ₯: {e}")
        return "λΆμ μ€ μ€λ₯κ° λ°μνμ΅λλ€. λ€μ μλν΄μ£ΌμΈμ.", None, gr.update(visible=False)
def filter_and_sort_table(df, selected_cat, keyword_sort, total_volume_sort, usage_count_sort, selected_volume_range, exclude_zero_volume, session_id):
    """Re-render the results table via the remote filter/sort endpoint.

    `df` is accepted to match the Gradio wiring but is not sent — the API
    keeps its own table state. Returns the table html, or "" on failure.
    """
    update_session_activity(session_id)
    filter_params = {
        "selected_cat": selected_cat,
        "keyword_sort": keyword_sort,
        "total_volume_sort": total_volume_sort,
        "usage_count_sort": usage_count_sort,
        "selected_volume_range": selected_volume_range,
        "exclude_zero_volume": exclude_zero_volume,
    }
    try:
        return get_client().predict(api_name="/filter_and_sort_table", **filter_params)
    except Exception as e:
        logger.error(f"νν°λ§ API νΈμΆ μ€λ₯: {e}")
        return ""
def update_category_selection(selected_cat, session_id):
    """Mirror the table's category filter into the analysis category dropdown.

    Handled locally — no API call is needed for this pure UI sync.
    """
    update_session_activity(session_id)
    logger.info(f"μΉ΄ν κ³ λ¦¬ μ ν λ³κ²½: {selected_cat}")
    return gr.update(value=selected_cat)
def reset_interface(session_id):
    """Reset all UI inputs/outputs and drop this session's temp files.

    Tries the remote /reset_interface endpoint first; on failure returns a
    hard-coded 20-tuple matching the outputs wired to the reset button.
    """
    update_session_activity(session_id)
    # Delete every temp file registered for this session (best effort)
    for file_path in session_temp_files.get(session_id, []):
        try:
            if os.path.exists(file_path):
                os.remove(file_path)
                logger.info(f"μΈμ {session_id[:8]}... 리μ μ νμΌ μμ : {file_path}")
        except Exception as e:
            logger.error(f"μΈμ {session_id[:8]}... 리μ μ νμΌ μμ μ€λ₯: {e}")
    if session_id in session_temp_files:
        session_temp_files[session_id] = []
    try:
        return get_client().predict(api_name="/reset_interface")
    except Exception as e:
        logger.error(f"리μ API νΈμΆ μ€λ₯: {e}")
        return (
            "", True, False, "λ©μΈν€μλ μ μ©", "", ["μ 체 보기"], "μ 체 보기",
            ["μ 체"], "μ 체", "μ λ ¬ μμ", "μ λ ¬ μμ", None, ["μ 체 보기"],
            "μ 체 보기", "", "", None, gr.update(visible=False),
            gr.update(visible=False), ""
        )
def search_with_loading(keyword, korean_only, apply_main_keyword, exclude_zero_volume, session_id):
    # First step of the search chain: show the progress section and hide the
    # placeholder table; process_search_results does the actual work next.
    update_session_activity(session_id)
    return (gr.update(visible=True), gr.update(visible=False))
def process_search_results(keyword, korean_only, apply_main_keyword, exclude_zero_volume, session_id):
    """Run the keyword search and map the API result onto the UI components.

    Returns a 12-tuple matching the outputs wired in create_app: table html,
    two filter-dropdown updates, the state df placeholder, the analysis
    category update, the excel file, four visibility updates, and the
    keyword state.
    """
    update_session_activity(session_id)
    result = wrapper_modified(keyword, korean_only, apply_main_keyword, exclude_zero_volume, session_id)
    (table_html, cat_choices, vol_choices, df, selected_cat, excel,
     keyword_section_vis, cat_section_vis, new_keyword_state) = result
    # Decide whether the results table actually contains anything to show.
    # wrapper_modified may hand back a plain html string or a gr payload dict;
    # the original one-liner relied on confusing conditional-expression
    # precedence — this branch has identical truthiness, explicitly.
    if isinstance(table_html, dict):
        has_table = bool(table_html.get('value'))
    else:
        has_table = bool(table_html)
    if has_table:
        empty_placeholder_vis = False
        keyword_section_visibility = True
        execution_section_visibility = True
        logger.info("ν μ΄λΈ λ°μ΄ν°κ° μμ - μΉμ λ€μ νμν©λλ€")
    else:
        empty_placeholder_vis = True
        keyword_section_visibility = False
        execution_section_visibility = False
        logger.info("ν μ΄λΈ λ°μ΄ν°κ° μμ - κΈ°λ³Έ μνλ₯Ό μ μ§ν©λλ€")
    return (
        table_html, cat_choices, vol_choices, df, selected_cat, excel,
        gr.update(visible=keyword_section_visibility),
        gr.update(visible=True),   # category-analysis section always shown after a search
        gr.update(visible=False),  # hide the progress indicator
        gr.update(visible=empty_placeholder_vis),
        gr.update(visible=execution_section_visibility),
        new_keyword_state
    )
def analyze_with_loading(analysis_keywords, selected_category, state_df, session_id):
    # First step of the analyze chain: reveal the progress section only;
    # process_analyze_results does the real work afterwards.
    update_session_activity(session_id)
    return gr.update(visible=True)
def process_analyze_results(analysis_keywords, selected_category, state_df, session_id):
    """Run the category analysis, then hide the progress section."""
    update_session_activity(session_id)
    analysis_html, download_path, section_update = analyze_with_auto_download(
        analysis_keywords, selected_category, state_df, session_id
    )
    # Fourth output hides the progress indicator once results are in
    return analysis_html, download_path, section_update, gr.update(visible=False)
def start_session_cleanup_scheduler():
    """Start the periodic background cleanup (daemon thread, 10-minute cycle).

    Each pass expires stale sessions and sweeps temp folders. The loop body
    is guarded so one failing pass cannot silently kill the scheduler thread
    for the remaining lifetime of the process (the original loop had no
    guard and any exception ended the thread permanently).
    """
    def cleanup_scheduler():
        while True:
            time.sleep(600)  # every 10 minutes
            try:
                cleanup_old_sessions()
                cleanup_huggingface_temp_folders()
            except Exception as e:
                # Log and keep looping — the scheduler must survive bad passes
                logger.error(f"Periodic cleanup pass failed (scheduler continues): {e}")
    threading.Thread(target=cleanup_scheduler, daemon=True).start()
def cleanup_on_startup():
    """Full cleanup at application startup; returns the app temp directory."""
    logger.info("π§Ή μ ν리μΌμ΄μ μμ - μ΄κΈ° μ 리 μμ μμ...")
    cleanup_huggingface_temp_folders()
    app_temp_dir = setup_clean_temp_environment()
    # In-place .clear() keeps the module-level dict objects themselves alive
    # for every function that already references them (no rebinding needed).
    session_temp_files.clear()
    session_data.clear()
    logger.info(f"β μ΄κΈ° μ 리 μμ μλ£ - μ± μ μ© λλ ν 리: {app_temp_dir}")
    return app_temp_dir
def create_app():
    """Build the Gradio Blocks UI and wire all event handlers.

    Layout: search input → progress indicator → results table with filters →
    category analysis input → execution buttons → analysis summary/download.
    Returns the (un-launched) gr.Blocks app.
    """
    # External fonts/icons injected as raw <link> tags
    fontawesome_html = """
    <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css">
    <link rel="stylesheet" href="https://cdn.jsdelivr.net/gh/orioncactus/pretendard/dist/web/static/pretendard.css">
    <link rel="stylesheet" href="https://fonts.googleapis.com/css2?family=Noto+Sans+KR:wght@300;400;500;700&display=swap">
    """
    try:
        with open('style.css', 'r', encoding='utf-8') as f:
            custom_css = f.read()
    except (OSError, UnicodeDecodeError):
        # Missing/unreadable stylesheet is non-fatal: run without custom CSS.
        # (was a bare `except:`, which would also swallow KeyboardInterrupt)
        custom_css = ""
    with gr.Blocks(css=custom_css, theme=gr.themes.Default(
        primary_hue="orange",
        secondary_hue="orange",
        font=[gr.themes.GoogleFont("Noto Sans KR"), "ui-sans-serif", "system-ui"]
    )) as demo:
        gr.HTML(fontawesome_html)
        # Per-browser-session state; gr.State calls the factory per session
        session_id = gr.State(get_session_id)
        keyword_state = gr.State("")
        # --- Search input section ---
        with gr.Column(elem_classes="custom-frame fade-in"):
            gr.HTML('<div class="section-title"><i class="fas fa-search"></i> κ²μ μ λ ₯</div>')
            with gr.Row():
                with gr.Column(scale=1):
                    keyword = gr.Textbox(label="λ©μΈ ν€μλ", placeholder="μ: μ€μ§μ΄")
                with gr.Column(scale=1):
                    search_btn = gr.Button("λ©μΈν€μλ λΆμ", elem_classes="custom-button")
            with gr.Accordion("μ΅μ μ€μ ", open=False):
                with gr.Row():
                    with gr.Column(scale=1):
                        korean_only = gr.Checkbox(label="νκΈλ§ μΆμΆ", value=True)
                    with gr.Column(scale=1):
                        exclude_zero_volume = gr.Checkbox(label="κ²μλ 0 ν€μλ μ μΈ", value=False)
                with gr.Row():
                    with gr.Column(scale=1):
                        apply_main_keyword = gr.Radio(
                            ["λ©μΈν€μλ μ μ©", "λ©μΈν€μλ λ―Έμ μ©"],
                            label="μ‘°ν© λ°©μ",
                            value="λ©μΈν€μλ μ μ©"
                        )
                    with gr.Column(scale=1):
                        gr.HTML("")
        # --- Progress indicator (hidden until a search runs) ---
        with gr.Column(elem_classes="custom-frame fade-in", visible=False) as progress_section:
            gr.HTML('<div class="section-title"><i class="fas fa-spinner"></i> λΆμ μ§ν μν</div>')
            progress_html = gr.HTML("""
            <div style="padding: 15px; background-color: #f9f9f9; border-radius: 5px; margin: 10px 0; border: 1px solid #ddd;">
                <div style="margin-bottom: 10px; display: flex; align-items: center;">
                    <i class="fas fa-spinner fa-spin" style="color: #FB7F0D; margin-right: 10px;"></i>
                    <span>ν€μλ λ°μ΄ν°λ₯Ό λΆμμ€μ λλ€. μ μλ§ κΈ°λ€λ €μ£ΌμΈμ...</span>
                </div>
                <div style="background-color: #e9ecef; height: 10px; border-radius: 5px; overflow: hidden;">
                    <div class="progress-bar"></div>
                </div>
            </div>
            """)
        # --- Main results section with an empty placeholder table ---
        with gr.Column(elem_classes="custom-frame fade-in") as main_keyword_section:
            gr.HTML('<div class="section-title"><i class="fas fa-table"></i> λ©μΈν€μλ λΆμ κ²°κ³Ό</div>')
            empty_table_html = gr.HTML("""
            <table class="empty-table">
                <thead>
                    <tr>
                        <th>μλ²</th>
                        <th>μ‘°ν© ν€μλ</th>
                        <th>PCκ²μλ</th>
                        <th>λͺ¨λ°μΌκ²μλ</th>
                        <th>μ΄κ²μλ</th>
                        <th>κ²μλꡬκ°</th>
                        <th>ν€μλ μ¬μ©μμμ</th>
                        <th>ν€μλ μ¬μ©νμ</th>
                        <th>μν λ±λ‘ μΉ΄ν κ³ λ¦¬</th>
                    </tr>
                </thead>
                <tbody>
                    <tr>
                        <td colspan="9" style="padding: 30px; text-align: center;">
                            κ²μμ μ€ννλ©΄ μ¬κΈ°μ κ²°κ³Όκ° νμλ©λλ€
                        </td>
                    </tr>
                </tbody>
            </table>
            """)
            # Filter/sort controls plus the live table, revealed after a search
            with gr.Column(visible=False) as keyword_analysis_section:
                with gr.Row():
                    with gr.Column(scale=1):
                        category_filter = gr.Dropdown(
                            choices=["μ 체 보기"],
                            label="μΉ΄ν κ³ λ¦¬ νν°",
                            value="μ 체 보기",
                            interactive=True
                        )
                    with gr.Column(scale=1):
                        total_volume_sort = gr.Dropdown(
                            choices=["μ λ ¬ μμ", "μ€λ¦μ°¨μ", "λ΄λ¦Όμ°¨μ"],
                            label="μ΄κ²μλ μ λ ¬",
                            value="μ λ ¬ μμ",
                            interactive=True
                        )
                with gr.Row():
                    with gr.Column(scale=1):
                        search_volume_filter = gr.Dropdown(
                            choices=["μ 체"],
                            label="κ²μλ κ΅¬κ° νν°",
                            value="μ 체",
                            interactive=True
                        )
                    with gr.Column(scale=1):
                        usage_count_sort = gr.Dropdown(
                            choices=["μ λ ¬ μμ", "μ€λ¦μ°¨μ", "λ΄λ¦Όμ°¨μ"],
                            label="ν€μλ μ¬μ©νμ μ λ ¬",
                            value="μ λ ¬ μμ",
                            interactive=True
                        )
                gr.HTML("<div class='data-container' id='table_container'></div>")
                table_output = gr.HTML(elem_classes="fade-in")
        # --- Category analysis input ---
        with gr.Column(elem_classes="custom-frame fade-in", visible=False) as category_analysis_section:
            gr.HTML('<div class="section-title"><i class="fas fa-chart-bar"></i> ν€μλ λΆμ</div>')
            with gr.Row():
                with gr.Column(scale=1):
                    analysis_keywords = gr.Textbox(
                        label="ν€μλ μ λ ₯ (μ΅λ 20κ°, μΌν λλ μν°λ‘ ꡬλΆ)",
                        placeholder="μ: μ€μ§μ΄λ³Άμ, μ€μ§μ΄ μμ§, μ€μ§μ΄ μ리...",
                        lines=5
                    )
                with gr.Column(scale=1):
                    selected_category = gr.Dropdown(
                        label="λΆμν μΉ΄ν κ³ λ¦¬(λΆμ μ λ°λμ μ νν΄μ£ΌμΈμ)",
                        choices=["μ 체 보기"],
                        value="μ 체 보기",
                        interactive=True
                    )
        # --- Execution buttons ---
        with gr.Column(elem_classes="execution-section", visible=False) as execution_section:
            gr.HTML('<div class="section-title"><i class="fas fa-play-circle"></i> μ€ν</div>')
            with gr.Row():
                with gr.Column(scale=1):
                    analyze_btn = gr.Button(
                        "μΉ΄ν κ³ λ¦¬ μΌμΉ λΆμ",
                        elem_classes=["execution-button", "primary-button"]
                    )
                with gr.Column(scale=1):
                    reset_btn = gr.Button(
                        "λͺ¨λ μ λ ₯ μ΄κΈ°ν",
                        elem_classes=["execution-button", "secondary-button"]
                    )
        # --- Analysis result summary + download ---
        with gr.Column(elem_classes="custom-frame fade-in", visible=False) as analysis_output_section:
            gr.HTML('<div class="section-title"><i class="fas fa-list-ul"></i> λΆμ κ²°κ³Ό μμ½</div>')
            analysis_result = gr.HTML(elem_classes="fade-in")
            with gr.Row():
                download_output = gr.File(label="ν€μλ λͺ©λ‘ λ€μ΄λ‘λ", visible=True)
        state_df = gr.State()
        # --- Event wiring ---
        # Search: show spinner first, then fetch and render results
        search_btn.click(
            fn=search_with_loading,
            inputs=[keyword, korean_only, apply_main_keyword, exclude_zero_volume, session_id],
            outputs=[progress_section, empty_table_html]
        ).then(
            fn=process_search_results,
            inputs=[keyword, korean_only, apply_main_keyword, exclude_zero_volume, session_id],
            outputs=[
                table_output, category_filter, search_volume_filter,
                state_df, selected_category, download_output,
                keyword_analysis_section, category_analysis_section,
                progress_section, empty_table_html, execution_section,
                keyword_state
            ]
        )
        # Every filter/sort control re-renders the table through the same
        # handler; the hidden Textbox supplies the unused keyword_sort slot.
        category_filter.change(
            fn=filter_and_sort_table,
            inputs=[
                state_df, category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume, session_id
            ],
            outputs=[table_output]
        )
        category_filter.change(
            fn=update_category_selection,
            inputs=[category_filter, session_id],
            outputs=[selected_category]
        )
        total_volume_sort.change(
            fn=filter_and_sort_table,
            inputs=[
                state_df, category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume, session_id
            ],
            outputs=[table_output]
        )
        usage_count_sort.change(
            fn=filter_and_sort_table,
            inputs=[
                state_df, category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume, session_id
            ],
            outputs=[table_output]
        )
        search_volume_filter.change(
            fn=filter_and_sort_table,
            inputs=[
                state_df, category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume, session_id
            ],
            outputs=[table_output]
        )
        exclude_zero_volume.change(
            fn=filter_and_sort_table,
            inputs=[
                state_df, category_filter, gr.Textbox(value="μ λ ¬ μμ", visible=False),
                total_volume_sort, usage_count_sort,
                search_volume_filter, exclude_zero_volume, session_id
            ],
            outputs=[table_output]
        )
        # Analyze: spinner first, then analysis + download + hide spinner
        analyze_btn.click(
            fn=analyze_with_loading,
            inputs=[analysis_keywords, selected_category, state_df, session_id],
            outputs=[progress_section]
        ).then(
            fn=process_analyze_results,
            inputs=[analysis_keywords, selected_category, state_df, session_id],
            outputs=[analysis_result, download_output, analysis_output_section, progress_section]
        )
        # Reset: the 20 outputs line up with reset_interface's fallback tuple.
        # NOTE(review): some components appear twice on purpose to absorb the
        # duplicated slots in that 20-value tuple — confirm against the API.
        reset_btn.click(
            fn=reset_interface,
            inputs=[session_id],
            outputs=[
                keyword, korean_only, exclude_zero_volume, apply_main_keyword,
                table_output, category_filter, category_filter,
                search_volume_filter, search_volume_filter,
                total_volume_sort, usage_count_sort,
                state_df, selected_category, selected_category,
                analysis_keywords, analysis_result, download_output,
                keyword_analysis_section, analysis_output_section,
                keyword_state
            ]
        )
    return demo
if __name__ == "__main__":
    # Application entry point: clean temp state, start the background
    # cleanup scheduler, then build and launch the Gradio UI.
    logger.info("π λ©μΈν€μλ λΆμ μ ν리μΌμ΄μ μμ...")
    app_temp_dir = cleanup_on_startup()
    start_session_cleanup_scheduler()
    logger.info("===== λ©ν°μ μ λ©μΈν€μλ λΆμ Application Startup at %s =====", time.strftime("%Y-%m-%d %H:%M:%S"))
    logger.info(f"π μμ νμΌ μ μ₯ μμΉ: {app_temp_dir}")
    try:
        app = create_app()
        # 0.0.0.0 so the container exposes the app; 7860 is the HF Spaces default port
        app.launch(
            share=False,
            server_name="0.0.0.0",
            server_port=7860,
            max_threads=40,
            auth=None,
            show_error=True,
            quiet=False,
            favicon_path=None,
            ssl_verify=False
        )
    except Exception as e:
        logger.error(f"μ ν리μΌμ΄μ μ€ν μ€ν¨: {e}")
        raise
    finally:
        # Best-effort final sweep of session/temp artifacts on shutdown
        logger.info("π§Ή μ ν리μΌμ΄μ μ’ λ£ - μ΅μ’ μ 리 μμ ...")
        try:
            cleanup_huggingface_temp_folders()
            if os.path.exists(app_temp_dir):
                shutil.rmtree(app_temp_dir, ignore_errors=True)
            logger.info("β μ΅μ’ μ 리 μλ£")
        except Exception as e:
            logger.error(f"μ΅μ’ μ 리 μ€ μ€λ₯: {e}")