File size: 4,275 Bytes
6eed7cc
 
ab7a9fb
6eed7cc
25c9425
 
c4c7edc
99287a0
7498d21
99287a0
7498d21
99287a0
6eed7cc
 
 
 
 
 
 
 
 
 
 
 
 
7498d21
25c9425
 
 
 
a827798
25c9425
 
 
 
 
 
 
 
 
 
a827798
 
25c9425
99287a0
25c9425
a827798
25c9425
 
a827798
 
19c2b7c
 
 
7498d21
 
c4c7edc
 
99287a0
 
 
 
 
 
 
 
 
 
7498d21
 
 
 
 
 
 
 
 
25c9425
19c2b7c
 
6eed7cc
25c9425
19c2b7c
25c9425
 
6eed7cc
19c2b7c
7498d21
 
c4c7edc
7498d21
 
 
 
 
 
 
 
 
 
c4c7edc
bc77cc6
 
 
 
 
 
7498d21
 
25c9425
6eed7cc
19c2b7c
 
 
c4c7edc
d6a350c
 
99287a0
c4c7edc
d6a350c
 
25c9425
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
import os
import shutil
import subprocess

import gradio as gr

# Rolling log buffers shared between the subprocess readers and the UI.
logs = []
inference_logs = []

# Set up the directories (original comment was Arabic: "prepare the folders"):
# data_dir holds uploaded inputs and saved results; download_dir is the
# public folder Gradio serves files from.
data_dir = "/home/user/data"
download_dir = "/home/user/app/public"
os.makedirs(data_dir, exist_ok=True)
os.makedirs(download_dir, exist_ok=True)

def run_training(cmd=None):
    """Launch the training script and stream its output to the UI.

    Args:
        cmd: Command list to execute. Defaults to ``["python", "run.py"]``
            so the existing Gradio button callback is unchanged.

    Yields:
        str: The last 50 log lines joined with newlines, emitted after
        every new line so the textbox updates live.
    """
    global logs
    logs = []
    process = subprocess.Popen(
        cmd or ["python", "run.py"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,  # interleave stderr so errors are visible
        text=True,
        bufsize=1,  # line-buffered: stream output as it is produced
    )
    for line in process.stdout:
        # Strip the trailing newline: joining with "\n" below would
        # otherwise render a blank line between every log entry.
        logs.append(line.rstrip("\n"))
        yield "\n".join(logs[-50:])
    # Close the pipe and reap the child so it does not linger as a zombie.
    process.stdout.close()
    process.wait()

def run_inference(cmd=None, results_dir=None):
    """Run the inference script, archive its logs, and return a summary.

    Args:
        cmd: Command list to execute. Defaults to
            ``["python", "inference.py"]`` so the existing button callback
            is unchanged.
        results_dir: Directory where ``inference_results.txt`` is appended.
            Defaults to the module-level ``data_dir``.

    Returns:
        str: The stripped "Summary for UI" line if the script printed one,
        otherwise the last 50 log lines joined for display.
    """
    global inference_logs
    inference_logs = []
    summary_line = ""

    process = subprocess.Popen(
        cmd or ["python", "inference.py"],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,  # interleave stderr so errors are visible
        text=True,
        bufsize=1,  # line-buffered for live capture
    )
    for line in process.stdout:
        # Keep the trailing newline: writelines() below relies on it.
        inference_logs.append(line)
        if "Summary for UI" in line:
            summary_line = line.strip()
    # Close the pipe and reap the child so it does not linger as a zombie.
    process.stdout.close()
    process.wait()

    # Append this run's full log to a persistent results file.
    target_dir = results_dir if results_dir is not None else data_dir
    with open(os.path.join(target_dir, "inference_results.txt"), "a") as f:
        f.write("=== New Inference Run ===\n")
        f.writelines(inference_logs)
        f.write("\n")

    # Fallback display: strip per-line newlines so the join does not
    # produce doubled blank lines in the textbox.
    return summary_line or "\n".join(
        l.rstrip("\n") for l in inference_logs[-50:]
    )

def list_saved_files():
    """List the files saved in ``data_dir`` for the download tab.

    Returns:
        tuple: A ``gr.update`` refreshing the dropdown choices (first file
        preselected when available) and a display string, one line per file.
    """
    # Sort for a stable, predictable ordering — os.listdir order is arbitrary.
    files = sorted(os.listdir(data_dir))
    if not files:
        return gr.update(choices=[]), "πŸ“‚ No files found in /data."
    return gr.update(choices=files, value=files[0]), "\n".join(f"πŸ“„ {f}" for f in files)

def download_file(filename, src_dir=None, dst_dir=None):
    """Copy a saved file into the public download directory.

    Args:
        filename: Name of the file inside ``src_dir`` (comes from the
            dropdown, so it may be ``None``/empty when nothing is selected).
        src_dir: Source directory; defaults to the module-level ``data_dir``.
        dst_dir: Destination directory; defaults to the module-level
            ``download_dir``.

    Returns:
        str | None: The destination path Gradio should serve, or ``None``
        when no file was selected or the source does not exist.
    """
    # Guard: an empty dropdown yields None, which would crash os.path.join.
    if not filename:
        return None

    src_path = os.path.join(src_dir if src_dir is not None else data_dir, filename)
    dst_path = os.path.join(dst_dir if dst_dir is not None else download_dir, filename)

    if not os.path.exists(src_path):
        return None

    # shutil.copyfile streams in chunks instead of loading the whole file
    # into memory as the previous read()/write() pair did.
    shutil.copyfile(src_path, dst_path)
    return dst_path

def upload_specific_file(file_obj, filename, dest_dir=None):
    """Save an uploaded Gradio file into the data directory as *filename*.

    Args:
        file_obj: The Gradio file object (exposes the temp path via
            ``.name``), or ``None`` when nothing was uploaded.
        filename: Fixed name to store the upload under.
        dest_dir: Destination directory; defaults to the module-level
            ``data_dir``.

    Returns:
        str: A status message for the UI textbox.
    """
    if file_obj is None:
        # Fixed: the f-string had lost its placeholder and showed a
        # literal "(unknown)" instead of the target filename.
        return f"❌ No file selected for {filename}"

    dest_path = os.path.join(dest_dir if dest_dir is not None else data_dir, filename)
    # Stream the copy in chunks rather than reading the whole file at once.
    with open(file_obj.name, "rb") as src, open(dest_path, "wb") as dst:
        shutil.copyfileobj(src, dst)
    return f"βœ… {filename} uploaded."

# UI definition: three tabs wired to the functions above.
with gr.Blocks(title="VRP Solver Interface") as demo:
    gr.Markdown("# πŸš› VRP Solver: Transformer + RL + OR-Tools")

    # Tab 1: start training; run_training is a generator, so the textbox
    # streams the log tail live.
    with gr.Tab("πŸš€ Start Training"):
        start_btn = gr.Button("Start Training")
        output = gr.Textbox(label="Training Logs", lines=25)
        start_btn.click(fn=run_training, outputs=output)

    # Tab 2: upload the three required artifacts, then run inference.
    with gr.Tab("πŸ” Inference"):
        gr.Markdown("### πŸ“€ Upload Required Files to Run Inference")

        # Each upload row saves the file under a fixed name in data_dir
        # as soon as the File component changes (no separate submit step).
        with gr.Row():
            gr.Markdown("**params_saved.json**")
            file_params = gr.File()
            status_params = gr.Textbox(label="params_saved.json status", interactive=False)
            file_params.change(fn=lambda f: upload_specific_file(f, "params_saved.json"), inputs=file_params, outputs=status_params)

        with gr.Row():
            gr.Markdown("**model_state_dict.pt**")
            file_model = gr.File()
            status_model = gr.Textbox(label="model_state_dict.pt status", interactive=False)
            file_model.change(fn=lambda f: upload_specific_file(f, "model_state_dict.pt"), inputs=file_model, outputs=status_model)

        with gr.Row():
            gr.Markdown("**orders.csv (inference data)**")
            file_orders = gr.File()
            status_orders = gr.Textbox(label="orders.csv status", interactive=False)
            file_orders.change(fn=lambda f: upload_specific_file(f, "orders.csv"), inputs=file_orders, outputs=status_orders)

        infer_btn = gr.Button("Run Inference")
        infer_output = gr.Textbox(label="Inference Output", lines=15)
        infer_btn.click(fn=run_inference, outputs=infer_output)

    # Tab 3: browse files saved in data_dir and download a selected one
    # (download_file copies it into the publicly served folder first).
    with gr.Tab("πŸ“ Show Saved Files"):
        list_btn = gr.Button("List /data Files")
        file_output = gr.Textbox(label="Saved Files", lines=10)
        file_dropdown = gr.Dropdown(choices=[], label="Select File to Download")
        download_btn = gr.Button("⬇️ Download Selected File")
        download_output = gr.File(label="Your File Will Appear Here")

        list_btn.click(fn=list_saved_files, outputs=[file_dropdown, file_output])
        download_btn.click(fn=download_file, inputs=file_dropdown, outputs=download_output)

demo.launch()