openfree committed on
Commit
15d8fbe
·
verified ·
1 Parent(s): 2a151b4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +110 -10
app.py CHANGED
@@ -332,7 +332,6 @@ def create_sample_workflow(example_type="basic"):
332
  # 기본값은 basic
333
  return create_sample_workflow("basic")
334
 
335
- # 배포를 위한 독립 앱 생성 함수
336
  def generate_standalone_app(workflow_data: dict, app_name: str, app_description: str) -> str:
337
  """μ›Œν¬ν”Œλ‘œμš°λ₯Ό 독립적인 Gradio μ•±μœΌλ‘œ λ³€ν™˜"""
338
 
@@ -468,7 +467,13 @@ def execute_workflow(*input_values):
468
  except Exception as e:
469
  results[node_id] = f"[VIDraft Error: {{str(e)}}]"
470
  else:
471
- results[node_id] = f"[Simulated Response: {{input_text[:50]}}...]"
 
 
 
 
 
 
472
 
473
  elif node_type in ["ChatOutput", "textOutput", "Output"]:
474
  # Get connected result
@@ -488,6 +493,19 @@ with gr.Blocks(title="{app_name}", theme=gr.themes.Soft()) as demo:
488
  gr.Markdown("# {app_name}")
489
  gr.Markdown("{app_description}")
490
 
 
 
 
 
 
 
 
 
 
 
 
 
 
491
  # Extract nodes
492
  nodes = WORKFLOW_DATA.get("nodes", [])
493
  input_nodes = [n for n in nodes if n.get("type") in ["ChatInput", "textInput", "Input", "numberInput"]]
@@ -540,7 +558,8 @@ requests
540
  """
541
 
542
  def deploy_to_huggingface(workflow_data: dict, app_name: str, app_description: str,
543
- hf_token: str, space_name: str, is_private: bool = False) -> dict:
 
544
  """Deploy workflow to Hugging Face Space"""
545
 
546
  if not HF_HUB_AVAILABLE:
@@ -562,6 +581,8 @@ def deploy_to_huggingface(workflow_data: dict, app_name: str, app_description: s
562
  # Generate files
563
  app_code = generate_standalone_app(workflow_data, app_name, app_description)
564
  requirements = generate_requirements_txt()
 
 
565
  readme = f"""---
566
  title: {app_name}
567
  emoji: 🐭
@@ -577,6 +598,27 @@ pinned: false
577
 
578
  {app_description}
579
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
580
  Generated by MOUSE Workflow
581
  """
582
 
@@ -602,12 +644,25 @@ Generated by MOUSE Workflow
602
  repo_type="space"
603
  )
604
 
 
 
 
 
 
 
 
 
 
 
 
 
605
  space_url = f"https://huggingface.co/spaces/{repo_id.repo_id}"
606
 
607
  return {
608
  "success": True,
609
  "space_url": space_url,
610
- "message": f"Successfully deployed to {space_url}"
 
611
  }
612
 
613
  except Exception as e:
@@ -1007,6 +1062,24 @@ with gr.Blocks(title="🐭 MOUSE Workflow", theme=gr.themes.Soft(), css=CSS) as
1007
  placeholder="hf_...",
1008
  info="Get your token from huggingface.co/settings/tokens"
1009
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1010
  deploy_private = gr.Checkbox(
1011
  label="Make Space Private",
1012
  value=False
@@ -1025,6 +1098,13 @@ with gr.Blocks(title="🐭 MOUSE Workflow", theme=gr.themes.Soft(), css=CSS) as
1025
  lines=20
1026
  )
1027
 
 
 
 
 
 
 
 
1028
  # ─── UI Execution Section ───
1029
  with gr.Column(elem_classes=["ui-execution-section"]):
1030
  gr.Markdown("## 🚀 UI Execution")
@@ -1308,7 +1388,8 @@ with gr.Blocks(title="🐭 MOUSE Workflow", theme=gr.themes.Soft(), css=CSS) as
1308
  )
1309
 
1310
  # Deploy handler
1311
- def handle_deploy(workflow_data, app_name, app_description, hf_token, space_name, is_private):
 
1312
  if not workflow_data:
1313
  return "❌ No workflow loaded. Please load a workflow first."
1314
 
@@ -1329,6 +1410,10 @@ with gr.Blocks(title="🐭 MOUSE Workflow", theme=gr.themes.Soft(), css=CSS) as
1329
  # Show deploying status
1330
  yield "🔄 Deploying to Hugging Face Space..."
1331
 
 
 
 
 
1332
  # Deploy
1333
  result = deploy_to_huggingface(
1334
  workflow_data=workflow_data,
@@ -1336,16 +1421,25 @@ with gr.Blocks(title="🐭 MOUSE Workflow", theme=gr.themes.Soft(), css=CSS) as
1336
  app_description=app_description,
1337
  hf_token=hf_token,
1338
  space_name=space_name,
1339
- is_private=is_private
 
 
1340
  )
1341
 
1342
  if result["success"]:
 
 
 
 
 
 
1343
  yield f"""✅ **Deployment Successful!**
1344
 
1345
  🎉 Your workflow has been deployed to:
1346
  [{result['space_url']}]({result['space_url']})
1347
 
1348
  ⏱️ The Space will be ready in a few minutes. Building usually takes 2-5 minutes.
 
1349
 
1350
  πŸ“ **Next Steps:**
1351
  1. Visit your Space URL
@@ -1353,16 +1447,22 @@ with gr.Blocks(title="🐭 MOUSE Workflow", theme=gr.themes.Soft(), css=CSS) as
1353
  3. Share the URL with others
1354
  4. You can edit the code directly on Hugging Face if needed
1355
 
1356
- 💡 **Tip:** Set your API keys as secrets in the Space settings:
1357
- - FRIENDLI_TOKEN (for VIDraft)
1358
- - OPENAI_API_KEY (for OpenAI)
 
 
 
 
 
1359
  """
1360
  else:
1361
  yield f"❌ **Deployment Failed**\n\nError: {result['error']}"
1362
 
1363
  btn_deploy.click(
1364
  fn=handle_deploy,
1365
- inputs=[loaded_data, deploy_name, deploy_description, deploy_token, deploy_space_name, deploy_private],
 
1366
  outputs=deploy_status
1367
  )
1368
 
 
332
  # 기본값은 basic
333
  return create_sample_workflow("basic")
334
 
 
335
  def generate_standalone_app(workflow_data: dict, app_name: str, app_description: str) -> str:
336
  """μ›Œν¬ν”Œλ‘œμš°λ₯Ό 독립적인 Gradio μ•±μœΌλ‘œ λ³€ν™˜"""
337
 
 
467
  except Exception as e:
468
  results[node_id] = f"[VIDraft Error: {{str(e)}}]"
469
  else:
470
+ # Show which API key is missing
471
+ if provider == "OpenAI":
472
+ results[node_id] = "[OpenAI API key not found. Please set OPENAI_API_KEY in Space secrets]"
473
+ elif provider == "VIDraft":
474
+ results[node_id] = "[VIDraft API key not found. Please set FRIENDLI_TOKEN in Space secrets]"
475
+ else:
476
+ results[node_id] = f"[Simulated Response: {{input_text[:50]}}...]"
477
 
478
  elif node_type in ["ChatOutput", "textOutput", "Output"]:
479
  # Get connected result
 
493
  gr.Markdown("# {app_name}")
494
  gr.Markdown("{app_description}")
495
 
496
+ # API Status Check
497
+ vidraft_token = os.getenv("FRIENDLI_TOKEN")
498
+ openai_key = os.getenv("OPENAI_API_KEY")
499
+
500
+ if not vidraft_token and not openai_key:
501
+ gr.Markdown("""
502
+ ⚠️ **API Keys Required**
503
+
504
+ Please set the following environment variables in Space settings → Secrets:
505
+ - `FRIENDLI_TOKEN` for VIDraft (Gemma-3-r1984-27B)
506
+ - `OPENAI_API_KEY` for OpenAI (gpt-4.1-mini)
507
+ """)
508
+
509
  # Extract nodes
510
  nodes = WORKFLOW_DATA.get("nodes", [])
511
  input_nodes = [n for n in nodes if n.get("type") in ["ChatInput", "textInput", "Input", "numberInput"]]
 
558
  """
559
 
560
  def deploy_to_huggingface(workflow_data: dict, app_name: str, app_description: str,
561
+ hf_token: str, space_name: str, is_private: bool = False,
562
+ include_friendli_token: bool = False, friendli_token: str = None) -> dict:
563
  """Deploy workflow to Hugging Face Space"""
564
 
565
  if not HF_HUB_AVAILABLE:
 
581
  # Generate files
582
  app_code = generate_standalone_app(workflow_data, app_name, app_description)
583
  requirements = generate_requirements_txt()
584
+
585
+ # README with API setup instructions
586
  readme = f"""---
587
  title: {app_name}
588
  emoji: 🐭
 
598
 
599
  {app_description}
600
 
601
+ ## 🔑 API Configuration
602
+
603
+ This app requires API keys to function properly. Please add the following secrets in your Space settings:
604
+
605
+ ### For VIDraft (Gemma-3-r1984-27B):
606
+ - **Secret Name**: `FRIENDLI_TOKEN`
607
+ - **Secret Value**: Your VIDraft API token (starts with `flp_`)
608
+ {f"- **Status**: ✅ Already configured" if include_friendli_token and friendli_token else "- **Status**: ❌ Needs to be added manually"}
609
+
610
+ ### For OpenAI (gpt-4.1-mini):
611
+ - **Secret Name**: `OPENAI_API_KEY`
612
+ - **Secret Value**: Your OpenAI API key (starts with `sk-`)
613
+ - **Status**: ❌ Needs to be added manually
614
+
615
+ ## πŸ“ How to add secrets:
616
+ 1. Go to your Space settings
617
+ 2. Click on "Repository secrets"
618
+ 3. Add the required secrets
619
+ 4. Restart your Space
620
+
621
+ ---
622
  Generated by MOUSE Workflow
623
  """
624
 
 
644
  repo_type="space"
645
  )
646
 
647
+ # Add secrets if requested
648
+ if include_friendli_token and friendli_token:
649
+ try:
650
+ # Add FRIENDLI_TOKEN as a secret
651
+ api.add_space_secret(
652
+ repo_id=repo_id.repo_id,
653
+ key="FRIENDLI_TOKEN",
654
+ value=friendli_token
655
+ )
656
+ except Exception as e:
657
+ print(f"Warning: Could not add FRIENDLI_TOKEN secret: {e}")
658
+
659
  space_url = f"https://huggingface.co/spaces/{repo_id.repo_id}"
660
 
661
  return {
662
  "success": True,
663
  "space_url": space_url,
664
+ "message": f"Successfully deployed to {space_url}",
665
+ "friendli_token_added": include_friendli_token and friendli_token
666
  }
667
 
668
  except Exception as e:
 
1062
  placeholder="hf_...",
1063
  info="Get your token from huggingface.co/settings/tokens"
1064
  )
1065
+
1066
+ # FRIENDLI_TOKEN 설정
1067
+ with gr.Group():
1068
+ include_friendli = gr.Checkbox(
1069
+ label="Include FRIENDLI_TOKEN in Space Secrets",
1070
+ value=True,
1071
+ info="Automatically add VIDraft API token to your Space"
1072
+ )
1073
+
1074
+ friendli_token_input = gr.Textbox(
1075
+ label="FRIENDLI_TOKEN (optional)",
1076
+ type="password",
1077
+ placeholder="flp_...",
1078
+ value=os.getenv("FRIENDLI_TOKEN", ""),
1079
+ info="Leave empty to use current token, or enter a new one",
1080
+ visible=True
1081
+ )
1082
+
1083
  deploy_private = gr.Checkbox(
1084
  label="Make Space Private",
1085
  value=False
 
1098
  lines=20
1099
  )
1100
 
1101
+ # Toggle FRIENDLI token input visibility
1102
+ include_friendli.change(
1103
+ fn=lambda x: gr.update(visible=x),
1104
+ inputs=include_friendli,
1105
+ outputs=friendli_token_input
1106
+ )
1107
+
1108
  # ─── UI Execution Section ───
1109
  with gr.Column(elem_classes=["ui-execution-section"]):
1110
  gr.Markdown("## 🚀 UI Execution")
 
1388
  )
1389
 
1390
  # Deploy handler
1391
+ def handle_deploy(workflow_data, app_name, app_description, hf_token, space_name,
1392
+ include_friendli, friendli_token, is_private):
1393
  if not workflow_data:
1394
  return "❌ No workflow loaded. Please load a workflow first."
1395
 
 
1410
  # Show deploying status
1411
  yield "🔄 Deploying to Hugging Face Space..."
1412
 
1413
+ # Use current FRIENDLI_TOKEN if not provided
1414
+ if include_friendli and not friendli_token:
1415
+ friendli_token = os.getenv("FRIENDLI_TOKEN", "")
1416
+
1417
  # Deploy
1418
  result = deploy_to_huggingface(
1419
  workflow_data=workflow_data,
 
1421
  app_description=app_description,
1422
  hf_token=hf_token,
1423
  space_name=space_name,
1424
+ is_private=is_private,
1425
+ include_friendli_token=include_friendli,
1426
+ friendli_token=friendli_token
1427
  )
1428
 
1429
  if result["success"]:
1430
+ secret_msg = ""
1431
+ if result.get("friendli_token_added"):
1432
+ secret_msg = "\n\n✅ **FRIENDLI_TOKEN has been automatically added to Space secrets!**"
1433
+ elif include_friendli and not friendli_token:
1434
+ secret_msg = "\n\n⚠️ **No FRIENDLI_TOKEN provided. Please add it manually in Space settings.**"
1435
+
1436
  yield f"""✅ **Deployment Successful!**
1437
 
1438
  🎉 Your workflow has been deployed to:
1439
  [{result['space_url']}]({result['space_url']})
1440
 
1441
  ⏱️ The Space will be ready in a few minutes. Building usually takes 2-5 minutes.
1442
+ {secret_msg}
1443
 
1444
  πŸ“ **Next Steps:**
1445
  1. Visit your Space URL
 
1447
  3. Share the URL with others
1448
  4. You can edit the code directly on Hugging Face if needed
1449
 
1450
+ 💡 **API Keys Configuration:**
1451
+ - FRIENDLI_TOKEN (for VIDraft): {"✅ Automatically added" if result.get("friendli_token_added") else "❌ Add manually in Space settings"}
1452
+ - OPENAI_API_KEY (for OpenAI): ❌ Add manually in Space settings if needed
1453
+
1454
+ 📚 **Space Management:**
1455
+ - To update secrets: Go to Space settings → Repository secrets
1456
+ - To restart Space: Go to Space settings → Factory reboot
1457
+ - To make changes: Edit files directly in the Space repository
1458
  """
1459
  else:
1460
  yield f"❌ **Deployment Failed**\n\nError: {result['error']}"
1461
 
1462
  btn_deploy.click(
1463
  fn=handle_deploy,
1464
+ inputs=[loaded_data, deploy_name, deploy_description, deploy_token, deploy_space_name,
1465
+ include_friendli, friendli_token_input, deploy_private],
1466
  outputs=deploy_status
1467
  )
1468