openfree committed on
Commit
b6039e9
·
verified ·
1 Parent(s): 28db47c

Delete app-backup2.py

Browse files
Files changed (1) hide show
  1. app-backup2.py +0 -642
app-backup2.py DELETED
@@ -1,642 +0,0 @@
"""
MOUSE Workflow - Visual Workflow Builder with UI Execution
@Powered by VIDraft
✓ Visual workflow designer with drag-and-drop
✓ Import/Export JSON with copy-paste support
✓ Auto-generate UI from workflow for end-user execution
"""

import os, json, typing, tempfile, traceback
import gradio as gr
from gradio_workflowbuilder import WorkflowBuilder

# Optional imports for LLM APIs.
# Each SDK is probed independently so the app still runs (in simulation mode)
# when a provider library is not installed.
try:
    from openai import OpenAI
    OPENAI_AVAILABLE = True
except ImportError:
    OPENAI_AVAILABLE = False
    print("OpenAI library not available. Install with: pip install openai")

try:
    import anthropic
    ANTHROPIC_AVAILABLE = True
except ImportError:
    ANTHROPIC_AVAILABLE = False
    print("Anthropic library not available. Install with: pip install anthropic")

try:
    import requests
    REQUESTS_AVAILABLE = True
except ImportError:
    REQUESTS_AVAILABLE = False
    print("Requests library not available. Install with: pip install requests")

# -------------------------------------------------------------------
# Helper functions
# -------------------------------------------------------------------
38
def export_pretty(data: typing.Dict[str, typing.Any]) -> str:
    """Render a workflow dict as human-readable JSON.

    Returns a placeholder message when *data* is empty or None.
    """
    if not data:
        return "No workflow to export"
    return json.dumps(data, indent=2, ensure_ascii=False)
40
-
41
def export_file(data: typing.Dict[str, typing.Any]) -> typing.Optional[str]:
    """Write the workflow to a temporary JSON file and return its path.

    Returns None when *data* is empty or the write fails.
    """
    if not data:
        return None
    fd, path = tempfile.mkstemp(suffix=".json", prefix="workflow_")
    try:
        # os.fdopen takes ownership of fd; the with-block closes it.
        with os.fdopen(fd, "w", encoding="utf-8") as f:
            json.dump(data, f, ensure_ascii=False, indent=2)
        return path
    except Exception as e:
        print(f"Error exporting file: {e}")
        # FIX: mkstemp already created the file on disk — remove it so a
        # failed export does not leave an orphaned temp file behind.
        try:
            os.unlink(path)
        except OSError:
            pass
        return None
53
-
54
def load_json_from_text_or_file(
    json_text: str, file_obj
) -> typing.Tuple[typing.Optional[typing.Dict[str, typing.Any]], str]:
    """Load a workflow dict from pasted JSON text or an uploaded file.

    The file, when present, takes priority over the text. Missing
    "nodes"/"edges" keys are filled with empty lists.

    Returns (data, status_message); data is None on any failure.
    """
    # File takes priority over pasted text.
    if file_obj is not None:
        try:
            # FIX: gr.File(type="filepath") hands us a plain str path, while
            # other configurations pass an object with a .name attribute —
            # support both (the original only handled .name and broke on str).
            path = file_obj if isinstance(file_obj, str) else file_obj.name
            with open(path, "r", encoding="utf-8") as f:
                json_text = f.read()
        except Exception as e:
            return None, f"โŒ Error reading file: {str(e)}"

    # Nothing to parse at all.
    if not json_text or json_text.strip() == "":
        return None, "No JSON data provided"

    try:
        data = json.loads(json_text.strip())

        # The workflow must be a JSON object.
        if not isinstance(data, dict):
            return None, "Invalid format: not a dictionary"

        # Ensure the required keys exist so downstream code can rely on them.
        if 'nodes' not in data:
            data['nodes'] = []
        if 'edges' not in data:
            data['edges'] = []

        nodes_count = len(data.get('nodes', []))
        edges_count = len(data.get('edges', []))

        return data, f"โœ… Loaded: {nodes_count} nodes, {edges_count} edges"

    except json.JSONDecodeError as e:
        return None, f"โŒ JSON parsing error: {str(e)}"
    except Exception as e:
        return None, f"โŒ Error: {str(e)}"
91
-
92
def create_sample_workflow():
    """Build the demo workflow: user question -> LLM -> answer."""
    question_node = {
        "id": "input_1",
        "type": "ChatInput",
        "position": {"x": 100, "y": 200},
        "data": {
            "label": "User Question",
            "template": {
                "input_value": {"value": "What is the capital of Korea?"}
            }
        }
    }
    llm_node = {
        "id": "llm_1",
        "type": "llmNode",
        "position": {"x": 400, "y": 200},
        "data": {
            "label": "AI Processing",
            "template": {
                "model": {"value": "gpt-3.5-turbo"},
                "temperature": {"value": 0.7},
                "system_prompt": {"value": "You are a helpful assistant."}
            }
        }
    }
    answer_node = {
        "id": "output_1",
        "type": "ChatOutput",
        "position": {"x": 700, "y": 200},
        "data": {"label": "Answer"}
    }
    # Wire the three nodes in a straight line.
    connections = [
        {"id": "e1", "source": "input_1", "target": "llm_1"},
        {"id": "e2", "source": "llm_1", "target": "output_1"}
    ]
    return {"nodes": [question_node, llm_node, answer_node], "edges": connections}
132
-
133
# Actual workflow execution used by the generated UI.
def execute_workflow_simple(workflow_data: dict, input_values: dict) -> dict:
    """Execute a workflow graph node-by-node and return per-node results.

    Parameters
    ----------
    workflow_data : dict
        WorkflowBuilder-format dict with "nodes" and "edges" lists.
    input_values : dict
        Maps input-node ids to user-supplied values from the generated UI.

    Returns
    -------
    dict
        Maps each processed node id to its output. LLM nodes fall back to a
        simulated response when no API key / SDK is available; per-node errors
        are captured as "[... Error: ...]" strings instead of raising.
    """
    import traceback

    # Resolve API keys at call time so environment changes are picked up.
    friendli_token = os.getenv("FRIENDLI_TOKEN")
    openai_key = os.getenv("OPENAI_API_KEY")
    anthropic_key = os.getenv("ANTHROPIC_API_KEY")

    # Probe optional SDKs locally; a missing library degrades to simulation.
    try:
        from openai import OpenAI
        openai_available = True
    except ImportError:
        openai_available = False
        print("OpenAI library not available")

    try:
        import anthropic
        anthropic_available = True
    except ImportError:
        anthropic_available = False
        print("Anthropic library not available")

    results = {}
    nodes = workflow_data.get("nodes", [])
    edges = workflow_data.get("edges", [])

    # Process nodes in list order (assumes producers appear before consumers
    # in the node list — TODO confirm; there is no topological sort here).
    for node in nodes:
        node_id = node.get("id")
        node_type = node.get("type", "")
        node_data = node.get("data", {})

        try:
            if node_type in ["ChatInput", "textInput", "Input"]:
                # Prefer the value supplied by the UI; otherwise the node default.
                if node_id in input_values:
                    results[node_id] = input_values[node_id]
                else:
                    template = node_data.get("template", {})
                    default_value = template.get("input_value", {}).get("value", "")
                    results[node_id] = default_value

            elif node_type in ["llmNode", "OpenAIModel", "ChatModel"]:
                # LLM node: pull config out of the template, defaulting safely
                # when a field is absent or not the expected {"value": ...} dict.
                template = node_data.get("template", {})

                model_info = template.get("model", {})
                model = model_info.get("value", "gpt-3.5-turbo") if isinstance(model_info, dict) else "gpt-3.5-turbo"

                temp_info = template.get("temperature", {})
                temperature = temp_info.get("value", 0.7) if isinstance(temp_info, dict) else 0.7

                prompt_info = template.get("system_prompt", {})
                system_prompt = prompt_info.get("value", "") if isinstance(prompt_info, dict) else ""

                provider_info = template.get("provider", {})
                provider = provider_info.get("value", "OpenAI") if isinstance(provider_info, dict) else "OpenAI"

                # Input text = result of the first already-computed upstream node.
                input_text = ""
                for edge in edges:
                    if edge.get("target") == node_id:
                        source_id = edge.get("source")
                        if source_id in results:
                            input_text = results[source_id]
                            break

                # Dispatch to the configured provider when a key is available.
                if provider == "OpenAI" and openai_key and openai_available:
                    try:
                        client = OpenAI(api_key=openai_key)

                        messages = []
                        if system_prompt:
                            messages.append({"role": "system", "content": system_prompt})
                        messages.append({"role": "user", "content": input_text})

                        response = client.chat.completions.create(
                            model=model,
                            messages=messages,
                            temperature=temperature,
                            max_tokens=1000
                        )

                        results[node_id] = response.choices[0].message.content

                    except Exception as e:
                        results[node_id] = f"[OpenAI Error: {str(e)}]"

                elif provider == "Anthropic" and anthropic_key and anthropic_available:
                    try:
                        client = anthropic.Anthropic(api_key=anthropic_key)

                        # BUGFIX: the Anthropic SDK rejects system=None — the
                        # original passed it unconditionally. Only include the
                        # "system" keyword when a system prompt is actually set.
                        request_kwargs = {
                            "model": "claude-3-haiku-20240307",
                            "max_tokens": 1000,
                            "temperature": temperature,
                            "messages": [{"role": "user", "content": input_text}],
                        }
                        if system_prompt:
                            request_kwargs["system"] = system_prompt

                        message = client.messages.create(**request_kwargs)

                        results[node_id] = message.content[0].text

                    except Exception as e:
                        results[node_id] = f"[Anthropic Error: {str(e)}]"

                elif provider == "Friendly" and friendli_token:
                    try:
                        import requests

                        headers = {
                            "Authorization": f"Bearer {friendli_token}",
                            "Content-Type": "application/json"
                        }

                        payload = {
                            # NOTE(review): hard-coded dedicated-endpoint model
                            # id — presumably tied to the deployer's account.
                            "model": "dep89a2fld32mcm",
                            "messages": [
                                {"role": "system", "content": system_prompt} if system_prompt else {"role": "system", "content": "You are a helpful assistant."},
                                {"role": "user", "content": input_text}
                            ],
                            "max_tokens": 1000,
                            "temperature": temperature
                        }

                        response = requests.post(
                            "https://api.friendli.ai/dedicated/v1/chat/completions",
                            headers=headers,
                            json=payload,
                            timeout=30
                        )

                        if response.status_code == 200:
                            response_json = response.json()
                            results[node_id] = response_json["choices"][0]["message"]["content"]
                        else:
                            results[node_id] = f"[Friendly API Error: {response.status_code}]"

                    except Exception as e:
                        results[node_id] = f"[Friendly Error: {str(e)}]"

                else:
                    # No usable key/SDK for the chosen provider: simulate.
                    results[node_id] = f"[Simulated AI Response to: {input_text[:50]}...]"

            elif node_type in ["ChatOutput", "textOutput", "Output"]:
                # Output nodes mirror the result of their upstream node.
                for edge in edges:
                    if edge.get("target") == node_id:
                        source_id = edge.get("source")
                        if source_id in results:
                            results[node_id] = results[source_id]
                            break

        except Exception as e:
            # Never abort the whole run on one bad node; record and continue.
            results[node_id] = f"[Node Error: {str(e)}]"
            print(f"Error processing node {node_id}: {traceback.format_exc()}")

    return results
301
-
302
# -------------------------------------------------------------------
# CSS — styles injected into the Gradio Blocks app below.
# -------------------------------------------------------------------
CSS = """
.main-container{max-width:1600px;margin:0 auto;}
.workflow-section{margin-bottom:2rem;min-height:500px;}
.button-row{display:flex;gap:1rem;justify-content:center;margin:1rem 0;}
.status-box{
  padding:10px;border-radius:5px;margin-top:10px;
  background:#f0f9ff;border:1px solid #3b82f6;color:#1e40af;
}
.component-description{
  padding:24px;background:linear-gradient(135deg,#f8fafc 0%,#e2e8f0 100%);
  border-left:4px solid #3b82f6;border-radius:12px;
  box-shadow:0 2px 8px rgba(0,0,0,.05);margin:16px 0;
}
.workflow-container{position:relative;}
.ui-execution-section{
  background:linear-gradient(135deg,#f0fdf4 0%,#dcfce7 100%);
  padding:24px;border-radius:12px;margin:24px 0;
  border:1px solid #86efac;
}
.powered-by{
  text-align:center;color:#64748b;font-size:14px;
  margin-top:8px;font-style:italic;
}
"""
329
-
330
# -------------------------------------------------------------------
# Gradio app
# -------------------------------------------------------------------
with gr.Blocks(title="๐Ÿญ MOUSE Workflow", theme=gr.themes.Soft(), css=CSS) as demo:

    with gr.Column(elem_classes=["main-container"]):
        gr.Markdown("# ๐Ÿญ MOUSE Workflow")
        gr.Markdown("**Visual Workflow Builder with Interactive UI Execution**")
        gr.HTML('<p class="powered-by">@Powered by VIDraft</p>')

        gr.HTML(
            """
            <div class="component-description">
            <p style="font-size:16px;margin:0;">Build sophisticated workflows visually โ€ข Import/Export JSON โ€ข Generate interactive UI for end-users</p>
            </div>
            """
        )

        # API Status Display — evaluated once at build time, so keys set after
        # startup will not be reflected here (execution re-checks them live).
        with gr.Accordion("๐Ÿ”Œ API Status", open=False):
            gr.Markdown(f"""
**Available APIs:**
- FRIENDLI_TOKEN: {'โœ… Connected' if os.getenv("FRIENDLI_TOKEN") else 'โŒ Not found'}
- OPENAI_API_KEY: {'โœ… Connected' if os.getenv("OPENAI_API_KEY") else 'โŒ Not found'}
- ANTHROPIC_API_KEY: {'โœ… Connected' if os.getenv("ANTHROPIC_API_KEY") else 'โŒ Not found'}

**Libraries:**
- OpenAI: {'โœ… Installed' if OPENAI_AVAILABLE else 'โŒ Not installed'}
- Anthropic: {'โœ… Installed' if ANTHROPIC_AVAILABLE else 'โŒ Not installed'}
- Requests: {'โœ… Installed' if REQUESTS_AVAILABLE else 'โŒ Not installed'}

*Note: Without API keys, the UI will simulate AI responses.*
""")

        # State for storing workflow data. `trigger_update` is a boolean that
        # is flipped after each load to force the @gr.render block to re-run.
        loaded_data = gr.State(None)
        trigger_update = gr.State(False)

        # --- Dynamic Workflow Container ---
        with gr.Column(elem_classes=["workflow-container"]):
            @gr.render(inputs=[loaded_data, trigger_update])
            def render_workflow(data, trigger):
                # Re-render the WorkflowBuilder whenever loaded data changes
                # or the trigger flips; empty canvas when nothing is loaded.
                workflow_value = data if data else {"nodes": [], "edges": []}

                return WorkflowBuilder(
                    label="๐ŸŽจ Visual Workflow Designer",
                    info="Drag from sidebar โ†’ Connect nodes โ†’ Edit properties",
                    value=workflow_value,
                    elem_id="main_workflow"
                )

        # --- Import Section ---
        with gr.Accordion("๐Ÿ“ฅ Import Workflow", open=True):
            with gr.Row():
                with gr.Column(scale=2):
                    import_json_text = gr.Code(
                        language="json",
                        label="Paste JSON here",
                        lines=8,
                        value='{\n "nodes": [],\n "edges": []\n}'
                    )
                with gr.Column(scale=1):
                    file_upload = gr.File(
                        label="Or upload JSON file",
                        file_types=[".json"],
                        type="filepath"
                    )
                    btn_load = gr.Button("๐Ÿ“ฅ Load Workflow", variant="primary", size="lg")
                    btn_sample = gr.Button("๐ŸŽฏ Load Sample", variant="secondary")

            # Status line shown under the import widgets.
            status_text = gr.Textbox(
                label="Status",
                value="Ready",
                elem_classes=["status-box"],
                interactive=False
            )

        # --- Export Section ---
        gr.Markdown("## ๐Ÿ’พ Export")

        with gr.Row():
            with gr.Column(scale=3):
                export_preview = gr.Code(
                    language="json",
                    label="Current Workflow JSON",
                    lines=8
                )
            with gr.Column(scale=1):
                btn_preview = gr.Button("๐Ÿ‘๏ธ Preview JSON", size="lg")
                btn_download = gr.DownloadButton("๐Ÿ’พ Download JSON", size="lg")

        # --- UI Execution Section ---
        with gr.Column(elem_classes=["ui-execution-section"]):
            gr.Markdown("## ๐Ÿš€ UI Execution")
            gr.Markdown("Generate an interactive UI from your workflow for end-users")

            btn_execute_ui = gr.Button("โ–ถ๏ธ Generate & Run UI", variant="primary", size="lg")

            # UI execution state: set from loaded_data when the button above
            # is clicked, which triggers the render block below.
            ui_workflow_data = gr.State(None)

            # Dynamic UI container: builds input/output widgets from the
            # workflow's node types.
            @gr.render(inputs=[ui_workflow_data])
            def render_execution_ui(workflow_data):
                if not workflow_data or not workflow_data.get("nodes"):
                    gr.Markdown("*Load a workflow first, then click 'Generate & Run UI'*")
                    return

                gr.Markdown("### ๐Ÿ“‹ Generated UI")

                # Partition nodes into inputs and outputs by type.
                input_nodes = []
                output_nodes = []

                for node in workflow_data.get("nodes", []):
                    node_type = node.get("type", "")
                    if node_type in ["ChatInput", "textInput", "Input", "numberInput"]:
                        input_nodes.append(node)
                    elif node_type in ["ChatOutput", "textOutput", "Output"]:
                        output_nodes.append(node)

                # One widget per input node, keyed by node id.
                input_components = {}

                if input_nodes:
                    gr.Markdown("#### ๐Ÿ“ฅ Inputs")
                    for node in input_nodes:
                        node_id = node.get("id")
                        label = node.get("data", {}).get("label", node_id)
                        node_type = node.get("type")

                        # Default value comes from the node's template.
                        template = node.get("data", {}).get("template", {})
                        default_value = template.get("input_value", {}).get("value", "")

                        if node_type == "numberInput":
                            input_components[node_id] = gr.Number(
                                label=label,
                                value=float(default_value) if default_value else 0
                            )
                        else:
                            input_components[node_id] = gr.Textbox(
                                label=label,
                                value=default_value,
                                lines=2,
                                placeholder="Enter your input..."
                            )

                # Execute button
                execute_btn = gr.Button("๐ŸŽฏ Execute", variant="primary")

                # One read-only widget per output node, keyed by node id.
                output_components = {}

                if output_nodes:
                    gr.Markdown("#### ๐Ÿ“ค Outputs")
                    for node in output_nodes:
                        node_id = node.get("id")
                        label = node.get("data", {}).get("label", node_id)

                        output_components[node_id] = gr.Textbox(
                            label=label,
                            interactive=False,
                            lines=3
                        )

                # Execution log
                gr.Markdown("#### ๐Ÿ“Š Execution Log")
                log_output = gr.Textbox(
                    label="Log",
                    interactive=False,
                    lines=5
                )

                # Handler: receives widget values positionally, in the same
                # order as input_components (dicts preserve insertion order).
                def execute_ui_workflow(*input_values):
                    # Map positional values back to node ids.
                    inputs_dict = {}
                    input_keys = list(input_components.keys())
                    for i, key in enumerate(input_keys):
                        if i < len(input_values):
                            inputs_dict[key] = input_values[i]

                    # Build a human-readable execution log as we go.
                    log = "=== Workflow Execution Started ===\n"
                    log += f"Inputs provided: {len(inputs_dict)}\n"

                    # Re-check API keys at execution time.
                    friendli_token = os.getenv("FRIENDLI_TOKEN")
                    openai_key = os.getenv("OPENAI_API_KEY")
                    anthropic_key = os.getenv("ANTHROPIC_API_KEY")

                    log += "\nAPI Status:\n"
                    log += f"- FRIENDLI_TOKEN: {'โœ… Found' if friendli_token else 'โŒ Not found'}\n"
                    log += f"- OPENAI_API_KEY: {'โœ… Found' if openai_key else 'โŒ Not found'}\n"
                    log += f"- ANTHROPIC_API_KEY: {'โœ… Found' if anthropic_key else 'โŒ Not found'}\n"

                    if not friendli_token and not openai_key and not anthropic_key:
                        log += "\nโš ๏ธ No API keys found. Results will be simulated.\n"
                        log += "To get real AI responses, set API keys in environment variables.\n"

                    log += "\n--- Processing Nodes ---\n"

                    try:
                        results = execute_workflow_simple(workflow_data, inputs_dict)

                        # Collect one value per output widget, in order.
                        output_values = []
                        for node_id in output_components.keys():
                            value = results.get(node_id, "No output")
                            output_values.append(value)

                            # Truncate long values in the log.
                            # NOTE(review): value[:100] assumes value is a str;
                            # a non-str result would raise here — confirm.
                            display_value = value[:100] + "..." if len(str(value)) > 100 else value
                            log += f"\nOutput [{node_id}]: {display_value}\n"

                        log += "\n=== Execution Completed Successfully! ===\n"
                        output_values.append(log)

                        return output_values

                    except Exception as e:
                        error_msg = f"โŒ Error: {str(e)}"
                        log += f"\n{error_msg}\n"
                        log += "=== Execution Failed ===\n"
                        # Fill every output widget with the error, plus the log.
                        return [error_msg] * len(output_components) + [log]

                # Wire the button: inputs/outputs order matches the handler's
                # positional protocol above.
                all_inputs = list(input_components.values())
                all_outputs = list(output_components.values()) + [log_output]

                execute_btn.click(
                    fn=execute_ui_workflow,
                    inputs=all_inputs,
                    outputs=all_outputs
                )

        # --- Event Handlers ---

        # Load workflow (from text or file). Returns (state, status, editor
        # text): when loading from a file, the editor is refreshed with the
        # pretty-printed content; on failure the editor is left unchanged.
        def load_workflow(json_text, file_obj):
            data, status = load_json_from_text_or_file(json_text, file_obj)
            if data:
                return data, status, json_text if not file_obj else export_pretty(data)
            else:
                return None, status, gr.update()

        # Each load chains a .then() that flips trigger_update so the
        # @gr.render workflow canvas re-renders.
        btn_load.click(
            fn=load_workflow,
            inputs=[import_json_text, file_upload],
            outputs=[loaded_data, status_text, import_json_text]
        ).then(
            fn=lambda current_trigger: not current_trigger,
            inputs=trigger_update,
            outputs=trigger_update
        )

        # Auto-load when file is uploaded
        file_upload.change(
            fn=load_workflow,
            inputs=[import_json_text, file_upload],
            outputs=[loaded_data, status_text, import_json_text]
        ).then(
            fn=lambda current_trigger: not current_trigger,
            inputs=trigger_update,
            outputs=trigger_update
        )

        # Load sample
        btn_sample.click(
            fn=lambda: (create_sample_workflow(), "โœ… Sample loaded", export_pretty(create_sample_workflow())),
            outputs=[loaded_data, status_text, import_json_text]
        ).then(
            fn=lambda current_trigger: not current_trigger,
            inputs=trigger_update,
            outputs=trigger_update
        )

        # Preview current workflow
        btn_preview.click(
            fn=export_pretty,
            inputs=loaded_data,
            outputs=export_preview
        )

        # Download workflow.
        # NOTE(review): DownloadButton usually needs itself as the output so
        # the returned file path is attached — no outputs are given here, so
        # the download may not fire; confirm against the Gradio version used.
        btn_download.click(
            fn=export_file,
            inputs=loaded_data
        )

        # Generate UI execution: copy the loaded workflow into the execution
        # state, which triggers render_execution_ui above.
        btn_execute_ui.click(
            fn=lambda data: data,
            inputs=loaded_data,
            outputs=ui_workflow_data
        )

        # Auto-update export preview when workflow changes
        loaded_data.change(
            fn=export_pretty,
            inputs=loaded_data,
            outputs=export_preview
        )
637
-
638
# -------------------------------------------------------------------
# Entry point — bind to all interfaces (0.0.0.0) for hosted deployment.
# -------------------------------------------------------------------
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", show_error=True)