import gradio as gr
import subprocess
import os
logs = []
inference_logs = []
# Set up the directories
data_dir = "/home/user/data"
download_dir = "/home/user/app/public"
os.makedirs(data_dir, exist_ok=True)
os.makedirs(download_dir, exist_ok=True)
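# data_dir holds uploaded inputs and generated results; download_dir is where
# files are staged before being handed back to the UI for download.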
def run_training():
    global logs
    logs = []
    # Launch the training script as a subprocess, merging stderr into stdout.
    process = subprocess.Popen(
        ["python", "run.py"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
        bufsize=1
    )
    # Read the output line by line and yield the last 50 lines as a rolling log.
    for line in process.stdout:
        logs.append(line)
        yield "\n".join(logs[-50:])
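# Because run_training is a generator, Gradio streams each yielded value into the
# "Training Logs" textbox while run.py is still running, instead of waiting for it to finish.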
def run_inference():
    global inference_logs
    inference_logs = []
    summary_line = ""
    # Run the inference script, merging stderr into stdout.
    process = subprocess.Popen(
        ["python", "inference.py"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
        bufsize=1
    )
    for line in process.stdout:
        inference_logs.append(line)
        # The inference script flags its final result with "Summary for UI".
        if "Summary for UI" in line:
            summary_line = line.strip()
    # Append the full log of this run to a persistent results file.
    with open(os.path.join(data_dir, "inference_results.txt"), "a") as f:
        f.write("=== New Inference Run ===\n")
        f.writelines(inference_logs)
        f.write("\n")
    # Prefer the summary line; otherwise show the last 50 log lines.
    return summary_line or "\n".join(inference_logs[-50:])
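# Every run's full transcript accumulates in inference_results.txt inside data_dir,
# so it also appears in the "Show Saved Files" tab defined below.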
def list_saved_files():
    files = os.listdir(data_dir)
    if not files:
        return gr.update(choices=[]), "📂 No files found in /data."
    return gr.update(choices=files, value=files[0]), "\n".join(f"📄 {f}" for f in files)
def download_file(filename):
    # Nothing selected in the dropdown yet.
    if not filename:
        return None
    src_path = os.path.join(data_dir, filename)
    dst_path = os.path.join(download_dir, filename)
    if not os.path.exists(src_path):
        return None
    # Copy the file into the download folder and return its path.
    with open(src_path, "rb") as src, open(dst_path, "wb") as dst:
        dst.write(src.read())
    return dst_path
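# Returning a filesystem path lets the gr.File output component serve the copied
# file to the browser.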
def upload_specific_file(file_obj, filename):
    if file_obj is None:
        return f"❌ No file selected for {filename}"
    # Copy the uploaded temporary file into the data directory under the expected name.
    dest_path = os.path.join(data_dir, filename)
    with open(file_obj.name, "rb") as src, open(dest_path, "wb") as dst:
        dst.write(src.read())
    return f"✅ {filename} uploaded."
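# Note: file_obj.name assumes the Gradio 3.x-style upload object (a temp-file wrapper);
# newer Gradio versions may pass a plain filepath string instead.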
with gr.Blocks(title="VRP Solver Interface") as demo:
    gr.Markdown("# 🚛 VRP Solver: Transformer + RL + OR-Tools")

    with gr.Tab("🚀 Start Training"):
        start_btn = gr.Button("Start Training")
        output = gr.Textbox(label="Training Logs", lines=25)
        start_btn.click(fn=run_training, outputs=output)

    with gr.Tab("🔍 Inference"):
        gr.Markdown("### 📤 Upload Required Files to Run Inference")

        with gr.Row():
            gr.Markdown("**params_saved.json**")
            file_params = gr.File()
            status_params = gr.Textbox(label="params_saved.json status", interactive=False)
            file_params.change(fn=lambda f: upload_specific_file(f, "params_saved.json"), inputs=file_params, outputs=status_params)

        with gr.Row():
            gr.Markdown("**model_state_dict.pt**")
            file_model = gr.File()
            status_model = gr.Textbox(label="model_state_dict.pt status", interactive=False)
            file_model.change(fn=lambda f: upload_specific_file(f, "model_state_dict.pt"), inputs=file_model, outputs=status_model)

        with gr.Row():
            gr.Markdown("**orders.csv (inference data)**")
            file_orders = gr.File()
            status_orders = gr.Textbox(label="orders.csv status", interactive=False)
            file_orders.change(fn=lambda f: upload_specific_file(f, "orders.csv"), inputs=file_orders, outputs=status_orders)

        infer_btn = gr.Button("Run Inference")
        infer_output = gr.Textbox(label="Inference Output", lines=15)
        infer_btn.click(fn=run_inference, outputs=infer_output)

    with gr.Tab("📁 Show Saved Files"):
        list_btn = gr.Button("List /data Files")
        file_output = gr.Textbox(label="Saved Files", lines=10)
        file_dropdown = gr.Dropdown(choices=[], label="Select File to Download")
        download_btn = gr.Button("⬇️ Download Selected File")
        download_output = gr.File(label="Your File Will Appear Here")
        list_btn.click(fn=list_saved_files, outputs=[file_dropdown, file_output])
        download_btn.click(fn=download_file, inputs=file_dropdown, outputs=download_output)
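# Note: older Gradio releases require demo.queue() before launch for generator-based
# streaming (as in run_training); recent versions enable the queue by default.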
demo.launch()