import gradio as gr
from pywebcopy import save_website
import os
import uuid
import zipfile


def download_and_zip_website(url):
    # Create a temporary project folder for this download
    project_id = str(uuid.uuid4())
    base_path = f"./temp/{project_id}"
    os.makedirs(base_path, exist_ok=True)

    try:
        # Download (mirror) the website into the project folder
        save_website(
            url=url,
            project_folder=base_path,
            project_name="site_copy",
            bypass_robots=True,
            debug=True
        )

        # Pack the mirrored files into a ZIP archive
        zip_path = f"./temp/{project_id}.zip"
        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for root, dirs, files in os.walk(base_path):
                for file in files:
                    filepath = os.path.join(root, file)
                    arcname = os.path.relpath(filepath, base_path)
                    zipf.write(filepath, arcname=arcname)

        return zip_path
    except Exception as e:
        # Raise a Gradio error so the UI shows the message instead of
        # treating the error string as a file path for the File output
        raise gr.Error(f"Error: {e}")


with gr.Blocks() as demo:
    gr.Markdown("# 🌐 Website mirror (with ZIP download)")

    with gr.Row():
        url_input = gr.Textbox(label="Enter a URL", placeholder="https://example.com")
        download_button = gr.Button("Download site")

    result = gr.File(label="Download link (ZIP)")

    download_button.click(
        fn=download_and_zip_website,
        inputs=url_input,
        outputs=result
    )

demo.launch()