broadfield-dev committed on
Commit
f3b5de5
·
verified ·
1 Parent(s): 1082c65

Update app.py

Files changed (1)
  1. app.py +242 -472
app.py CHANGED
@@ -1,4 +1,5 @@
1
-
 
2
  import gradio as gr
3
  import re
4
  import json
@@ -7,74 +8,29 @@ import tempfile
7
  import shlex
8
  from huggingface_hub import HfApi
9
 
10
- try:
11
- from build_logic import (
12
- # build_logic_create_space, # This is now handled by apply_staged_changes for AI
13
- _get_api_token as build_logic_get_api_token,
14
- whoami as build_logic_whoami,
15
- list_space_files_for_browsing,
16
- get_space_repository_info, # Used for initial load and status
17
- get_space_file_content,
18
- update_space_file, # Keep for manual editing
19
- parse_markdown as build_logic_parse_markdown,
20
- delete_space_file as build_logic_delete_space_file, # Keep for manual deletion
21
- get_space_runtime_status,
22
- apply_staged_changes, # NEW: Main function for applying AI changes
23
- build_logic_set_space_privacy, # Moved from app.py
24
- build_logic_delete_space # Moved from app.py
25
- )
26
- print("build_logic.py loaded successfully.")
 
 
27
 
28
- from model_logic import (
29
- get_available_providers,
30
- get_models_for_provider,
31
- get_default_model_for_provider,
32
- generate_stream
33
- )
34
- print("model_logic.py loaded successfully.")
35
- except ImportError as e:
36
- print(f"Warning: Local modules (build_logic.py, model_logic.py) not found. Using dummy functions. Error: {e}")
37
- def get_available_providers(): return ["DummyProvider", "Groq"] # Added Groq for testing
38
- def get_models_for_provider(p):
39
- if p == 'Groq': return ["llama3-8b-8192", "gemma-7b-it"]
40
- return ["dummy-model"]
41
- def get_default_model_for_provider(p):
42
- if p == 'Groq': return "llama3-8b-8192"
43
- return "dummy-model"
44
- # The dummy function already accepts the api_key argument ('a')
45
- def generate_stream(p, m, a, msgs):
46
- yield f"Using dummy model. API Key provided: {'Yes' if a else 'No'}. This is a dummy response as local modules were not found.\n" + bbb + "text\n### File: dummy.txt\nHello from dummy model!\n" + bbb
47
- # Dummy build_logic functions
48
- def build_logic_create_space(*args, **kwargs): return "Error: build_logic not found (Dummy)."
49
- def build_logic_get_api_token(key): return (key or os.getenv("HF_TOKEN"), None)
50
- def build_logic_whoami(token): return {"name": "dummy_user"}
51
- def list_space_files_for_browsing(*args): return ([], "Error: build_logic not found (Dummy).")
52
- def get_space_repository_info(*args): return (None, [], "Error: build_logic not found (Dummy).")
53
- def get_space_file_content(*args): return ("", "Error: build_logic not found (Dummy).")
54
- def update_space_file(*args, **kwargs): return "Error: build_logic not found (Dummy)."
55
- def build_logic_parse_markdown(md):
56
- # Dummy parser attempts to find files for testing
57
- files = []
58
- file_pattern = re.compile(r"### File:\s*(?P<filename_line>[^\n]+)\n(?:```(?P<lang>[\w\.\-\+]*)\n(?P<code>[\s\S]*?)\n```|(?P<binary_msg>\[Binary file(?: - [^\]]+)?\]))")
59
- for match in file_pattern.finditer(md):
60
- filename = _clean_filename(match.group("filename_line"))
61
- if filename: files.append({"path": filename, "content": match.group("code") or match.group("binary_msg") or ""})
62
- return {"repo_name_md": "dummy/space", "owner_md": "dummy", "files": files}
63
- def build_logic_delete_space_file(*args): return "Error: build_logic not found (Dummy)."
64
- def get_space_runtime_status(*args): return ({"stage": "DUMMY", "hardware": "dummy", "status": "dummy"}, "Error: build_logic not found (Dummy).")
65
- def apply_staged_changes(*args, **kwargs): return "Error: apply_staged_changes not found (Dummy).", [], None
66
- def build_logic_set_space_privacy(*args): return "Error: build_logic_set_space_privacy not found (Dummy)."
67
- def build_logic_delete_space(*args): return "Error: build_logic_delete_space not found (Dummy)."
68
-
69
- # --- END: Dummy functions ---
70
-
71
-
72
- # --- CORE FIX: Define triple backticks safely to prevent Markdown rendering issues ---
73
  backtick = chr(96)
74
  bbb = f'{backtick}{backtick}{backtick}'
75
 
76
- # State variable to hold the *current* representation of the Space's files and structure.
77
- # This is populated on load and updated by AI outputs or manual edits/deletes.
78
  parsed_code_blocks_state_cache = []
79
  BOT_ROLE_NAME = "assistant"
80
 
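For context, a minimal sketch of the file-block format that this bbb helper is used to emit and parse throughout the app (the filename and content here are made-up examples):

backtick = chr(96)
bbb = f"{backtick}{backtick}{backtick}"  # three backticks, built without writing them literally in source strings
# One file block, in the shape exported by _export_selected_logic and matched by the file_pattern regex further down:
sample_block = f"### File: demo.txt\n{bbb}plaintext\nhello world\n{bbb}"
print(sample_block)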
@@ -127,13 +83,9 @@ You will be provided with the current state of the files in the Space the user i
127
  If no code or actions are requested, respond conversationally and help the user understand the Space Commander's capabilities.
128
  """
129
 
130
- # --- Helper Functions ---
131
- # Keep existing helper functions (_infer_lang_from_filename, _clean_filename, etc.)
132
- # Refine _parse_chat_stream_logic to integrate with the state cache
133
-
134
  def escape_html_for_markdown(text):
135
  if not isinstance(text, str): return ""
136
- return text.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;") # Use &amp; for safety
137
 
138
  def _infer_lang_from_filename(filename):
139
  if not filename: return "plaintext"
@@ -162,23 +114,13 @@ def _infer_lang_from_filename(filename):
162
 
163
  def _clean_filename(filename_line_content):
164
  text = filename_line_content.strip()
165
- # Remove markdown formatting characters aggressively
166
  text = re.sub(r'[`\*_#]+', '', text).strip()
167
- # Remove parenthesized descriptions
168
  text = re.split(r'\s*\(', text, 1)[0].strip()
169
- # Remove leading/trailing quotes or colons sometimes added by models
170
  text = text.strip('\'":;,')
171
- # Ensure it doesn't start with '/'
172
  text = text.lstrip('/')
173
  return text
174
 
175
  def _parse_and_update_state_cache(latest_bot_message_content, current_files_state):
176
- """
177
- Parses the latest bot message content for file blocks and updates the
178
- global state cache with the latest version of each file.
179
- Returns the updated state cache and a list of filenames proposed in this turn.
180
- """
181
- # Start with a dictionary representation of the current state for easy updates
182
  current_files_dict = {f["filename"]: f.copy() for f in current_files_state if not f.get("is_structure_block")}
183
  structure_block_state = next((b for b in current_files_state if b.get("is_structure_block")), None)
184
 
@@ -187,18 +129,14 @@ def _parse_and_update_state_cache(latest_bot_message_content, current_files_stat
187
  file_pattern = re.compile(r"### File:\s*(?P<filename_line>[^\n]+)\n(?:```(?P<lang>[\w\.\-\+]*)\n(?P<code>[\s\S]*?)\n```|(?P<binary_msg>\[Binary file(?: - [^\]]+)?\]))", re.MULTILINE)
188
  structure_pattern = re.compile(r"## File Structure\n```(?:(?P<struct_lang>[\w.-]*)\n)?(?P<structure_code>[\s\S]*?)\n```", re.MULTILINE)
189
 
190
- # Parse File Structure block if present in the latest message (overwrites previous structure block)
191
  structure_match = structure_pattern.search(content)
192
  if structure_match:
193
  structure_block_state = {"filename": "File Structure (from AI)", "language": structure_match.group("struct_lang") or "plaintext", "code": structure_match.group("structure_code").strip(), "is_binary": False, "is_structure_block": True}
194
 
195
  current_message_proposed_filenames = []
196
- # Parse file blocks from the latest message
197
  for match in file_pattern.finditer(content):
198
  filename = _clean_filename(match.group("filename_line"))
199
- if not filename:
200
- print(f"Warning: Skipped file block due to empty/invalid filename parsing: '{match.group('filename_line').strip()}'")
201
- continue # Skip if filename couldn't be parsed
202
 
203
  lang, code_block, binary_msg = match.group("lang"), match.group("code"), match.group("binary_msg")
204
 
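As a quick self-contained check of what the file_pattern above captures (the sample reply is invented for illustration):

import re

file_pattern = re.compile(
    r"### File:\s*(?P<filename_line>[^\n]+)\n"
    r"(?:```(?P<lang>[\w\.\-\+]*)\n(?P<code>[\s\S]*?)\n```"
    r"|(?P<binary_msg>\[Binary file(?: - [^\]]+)?\]))",
    re.MULTILINE)

reply = "### File: app.py\n```python\nprint('hi')\n```"
m = file_pattern.search(reply)
print(m.group("filename_line"), m.group("lang"), repr(m.group("code")))
# -> app.py python "print('hi')"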
@@ -207,67 +145,42 @@ def _parse_and_update_state_cache(latest_bot_message_content, current_files_stat
207
  if code_block is not None:
208
  item_data["code"] = code_block.strip()
209
  item_data["language"] = (lang.strip().lower() if lang else _infer_lang_from_filename(filename))
210
- item_data["is_binary"] = False # Ensure explicit False if it's a code block
211
  elif binary_msg is not None:
212
  item_data["code"] = binary_msg.strip()
213
  item_data["language"] = "binary"
214
  item_data["is_binary"] = True
215
- else:
216
- # This case shouldn't be hit with the current regex, but as a safeguard
217
- print(f"Warning: Skipped file block for '{filename}' due to missing code or binary marker.")
218
- continue # Skip if content is neither code nor binary marker
219
 
220
- # Update or add the file in the dictionary state
221
  current_files_dict[filename] = item_data
222
  current_message_proposed_filenames.append(filename)
223
 
224
-
225
- # Convert dictionary back to a list, add structure block if present
226
  updated_parsed_blocks = list(current_files_dict.values())
227
  if structure_block_state:
228
- updated_parsed_blocks.insert(0, structure_block_state) # Add structure block at the beginning
229
 
230
- # Sort for consistent ordering
231
  updated_parsed_blocks.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
232
 
233
  return updated_parsed_blocks, current_message_proposed_filenames
234
 
235
-
236
  def _export_selected_logic(selected_filenames, space_line_name_for_md, parsed_blocks_for_export):
237
- """Generates the Markdown representation of the space state or selected files."""
238
  results = {"output_str": "", "error_message": None, "download_filepath": None}
239
- # Only include blocks that are files (not structure) for content export/display
240
  file_blocks_for_export = [b for b in parsed_blocks_for_export if not b.get("is_structure_block")]
241
-
242
- # Determine filenames present in the state that can potentially be exported/listed
243
  all_filenames_in_state = sorted(list(set(b["filename"] for b in file_blocks_for_export)))
244
 
245
- if not all_filenames_in_state:
246
- results["output_str"] = f"# Space: {space_line_name_for_md}\n## File Structure\n{bbb}\nπŸ“ Root\n{bbb}\n\n*No files in state to list structure or export.*"
247
- # Even if no files, create a temp file for download button functionality
248
- try:
249
- with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".md", encoding='utf-8') as tmpfile:
250
- tmpfile.write(results["output_str"]); results["download_filepath"] = tmpfile.name
251
- except Exception as e: print(f"Error creating temp file for empty state export: {e}")
252
- return results
253
-
254
  output_lines = [f"# Space: {space_line_name_for_md}"]
255
 
256
- # Add File Structure block if it exists in the state
257
  structure_block = next((b for b in parsed_blocks_for_export if b.get("is_structure_block")), None)
258
  if structure_block:
259
  output_lines.extend(["## File Structure", bbb, structure_block["code"].strip(), bbb, ""])
260
  else:
261
- # If no AI-generated structure block, create a basic one from file list
262
  output_lines.extend(["## File Structure", bbb, "📁 Root"])
263
  if all_filenames_in_state:
264
- for fname in all_filenames_in_state: output_lines.append(f" 📄 {fname}") # Basic flattening
265
  output_lines.extend([bbb, ""])
266
 
267
  output_lines.append("Below are the contents of all files in the space:\n")
268
 
269
- # Filter blocks to export content based on selection
270
- # If selected_filenames is None or empty, export all file blocks
271
  blocks_to_export_content = sorted([b for b in file_blocks_for_export if not selected_filenames or b["filename"] in selected_filenames], key=lambda b: b["filename"])
272
 
273
  exported_content_count = 0
@@ -275,31 +188,27 @@ def _export_selected_logic(selected_filenames, space_line_name_for_md, parsed_bl
275
  output_lines.append(f"### File: {block['filename']}")
276
  content = block.get('code', '')
277
  if block.get('is_binary') or content.startswith(("[Binary file", "[Error loading content:", "[Binary or Skipped file]")):
278
- # For binary/error placeholders, just print the marker line
279
  output_lines.append(content)
280
  else:
281
- # For text content, wrap in code block
282
- lang = block.get('language', 'plaintext') or 'plaintext' # Ensure language is not None or empty
283
  output_lines.extend([f"{bbb}{lang}", content, bbb])
284
- output_lines.append("") # Add blank line after each file block definition
285
  exported_content_count += 1
286
 
287
  if not exported_content_count:
288
- if selected_filenames:
289
- output_lines.append("*No selected files have editable content in the state.*")
290
- # else: already handled by the initial check for all_filenames_in_state
291
 
292
  final_output_str = "\n".join(output_lines)
293
  results["output_str"] = final_output_str
294
  try:
295
- # Create a temporary file for the download button
296
  with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".md", encoding='utf-8') as tmpfile:
297
- tmpfile.write(final_output_str)
298
- results["download_filepath"] = tmpfile.name
299
  except Exception as e:
300
  print(f"Error creating temp file for download: {e}")
301
  results["error_message"] = "Could not prepare file for download."
302
- results["download_filepath"] = None # Ensure download is disabled on error
303
 
304
  return results
305
 
@@ -308,21 +217,17 @@ def _convert_gr_history_to_api_messages(system_prompt, gr_history, current_user_
308
  for user_msg, bot_msg in gr_history:
309
  if user_msg: messages.append({"role": "user", "content": user_msg})
310
  if bot_msg and isinstance(bot_msg, str): messages.append({"role": BOT_ROLE_NAME, "content": bot_msg})
311
- # Append the current user message last if provided
312
  if current_user_message: messages.append({"role": "user", "content": current_user_message})
313
  return messages
314
 
315
  def _generate_ui_outputs_from_cache(owner, space_name):
316
- """Generates the Markdown displays and download link from the global state cache."""
317
  global parsed_code_blocks_state_cache
318
- # Markdown preview displays the *latest* version of each file from the cache
319
  preview_md_val = "*No files in cache to display.*"
320
- # Formatted markdown is the full representation including structure and all files
321
  space_line_name = f"{owner}/{space_name}" if owner and space_name else (owner or space_name or "your-space")
322
  export_result = _export_selected_logic(None, space_line_name, parsed_code_blocks_state_cache)
323
  formatted_md_val = export_result["output_str"]
324
  download_file = export_result["download_filepath"]
325
- formatted_md_val = formatted_md_val or "*Load or define a Space to see its Markdown structure.*" # Fallback text
326
 
327
  if parsed_code_blocks_state_cache:
328
  preview_md_lines = ["## Detected/Updated Files & Content (Latest Versions):"]
@@ -340,128 +245,84 @@ def _generate_ui_outputs_from_cache(owner, space_name):
340
 
341
  content = block.get('code', '')
342
  if block.get('is_binary') or content.startswith(("[Binary file", "[Error loading content:", "[Binary or Skipped file]")):
343
- preview_md_lines.append(f"\n`{escape_html_for_markdown(content.strip())}`\n") # Strip for preview markdown
344
  else:
345
- # Use 3 backticks for code block in preview
346
  lang = block.get('language', 'plaintext') or 'plaintext'
347
- preview_md_lines.append(f"\n{bbb}{lang}\n{content.strip()}\n{bbb}\n") # Strip for preview markdown
348
  preview_md_val = "\n".join(preview_md_lines)
349
 
350
-
351
  return formatted_md_val, preview_md_val, gr.update(value=download_file, interactive=download_file is not None)
352
 
353
- # --- NEW: Core logic for Change Staging and Confirmation ---
354
-
355
  def generate_and_stage_changes(ai_response_content, current_files_state, hf_owner_name, hf_repo_name):
356
- """
357
- Parses AI response, compares with current state (from cache),
358
- and generates a structured changeset and a markdown summary.
359
- """
360
  changeset = []
361
  current_files_dict = {f["filename"]: f for f in current_files_state if not f.get("is_structure_block")}
362
-
363
- # 1. Parse AI response for actions and file blocks
364
- # Use build_logic_parse_markdown to get file blocks in the AI's desired format
365
  ai_parsed_md = build_logic_parse_markdown(ai_response_content)
366
- ai_proposed_files_list = ai_parsed_md.get("files", []) # List of {"path": ..., "content": ...}
367
-
368
- # Convert AI proposed files list to dict for easier lookup and comparison
369
  ai_proposed_files_dict = {f["path"]: f for f in ai_proposed_files_list}
370
 
371
-
372
- # Parse HF_ACTION commands from AI response using regex on the raw content
373
  action_pattern = re.compile(r"### HF_ACTION:\s*(?P<command_line>[^\n]+)", re.MULTILINE)
374
  for match in action_pattern.finditer(ai_response_content):
375
- cmd_parts = shlex.split(match.group("command_line").strip())
376
- if not cmd_parts: continue
377
- command, args = cmd_parts[0].upper(), cmd_parts[1:]
378
-
379
- # Add actions to the changeset
380
- if command == "CREATE_SPACE" and args:
381
- # The AI command specifies the target repo_id
382
- repo_id = args[0]
383
- sdk = "gradio" # default
384
- private = False # default
385
- if '--sdk' in args:
386
- try: sdk = args[args.index('--sdk') + 1]
387
- except IndexError: print("Warning: CREATE_SPACE --sdk requires an argument.")
388
- if '--private' in args:
389
- try: private_str = args[args.index('--private') + 1].lower()
390
- except IndexError: print("Warning: CREATE_SPACE --private requires an argument.")
391
- else: private = private_str == 'true'
392
- # Action includes target repo, sdk, and private setting
393
- changeset.append({"type": "CREATE_SPACE", "repo_id": repo_id, "sdk": sdk, "private": private})
394
- print(f"Staged CREATE_SPACE action for {repo_id}")
395
-
396
- elif command == "DELETE_FILE" and args:
397
- file_path = args[0]
398
- changeset.append({"type": "DELETE_FILE", "path": file_path})
399
- print(f"Staged DELETE_FILE action for {file_path}")
400
- # Note: AI might propose deleting a file it *just* created/updated in the same turn.
401
- # The application logic handle_confirm_changes should process deletes *before* adds/updates,
402
- # or the commit operation itself should handle the conflict gracefully (delete then re-add/update).
403
- # The current `apply_staged_changes` does deletes first, then uploads.
404
-
405
- elif command == "SET_PRIVATE" and args:
406
- private = args[0].lower() == 'true'
407
- # Action applies to the currently loaded space
408
- changeset.append({"type": "SET_PRIVACY", "private": private, "repo_id": f"{hf_owner_name}/{hf_repo_name}"})
409
- print(f"Staged SET_PRIVACY action for {hf_owner_name}/{hf_repo_name} to {private}")
410
-
411
- elif command == "DELETE_SPACE":
412
- # Action applies to the currently loaded space
413
- changeset.append({"type": "DELETE_SPACE", "owner": hf_owner_name, "space_name": hf_repo_name})
414
- print(f"Staged DELETE_SPACE action for {hf_owner_name}/{hf_repo_name}")
415
- # Add other actions here as needed (e.g., `SET_HARDWARE`, `RESTART_SPACE`)
416
-
417
- # 3. Compare proposed files from AI with current files to determine CREATE/UPDATE
418
- # Iterate through files proposed by the AI in this turn
419
  for file_info in ai_proposed_files_list:
420
  filename = file_info["path"]
421
  proposed_content = file_info["content"]
422
 
423
  if filename in current_files_dict:
424
- # File exists, check if content changed
425
  current_content = current_files_dict[filename]["code"]
426
- if proposed_content != current_content:
427
- # Check if current file state is a binary/error placeholder before marking as update
428
- # If current is placeholder and proposed is content, treat as update (content is now known)
429
- # If both are placeholders or proposed is placeholder, maybe skip or special flag?
430
- is_current_placeholder = current_content.startswith(("[Binary file", "[Error loading content:", "[Binary or Skipped file]"))
431
- is_proposed_placeholder = proposed_content.startswith(("[Binary file", "[Error loading content:", "[Binary or Skipped file]"))
432
-
433
- if not is_proposed_placeholder: # Only stage update if AI provides actual content
434
- # Determine language for potential new/updated file block representation
435
- # Use the language if the file was already in cache, otherwise infer from filename
436
- lang = current_files_dict[filename].get("language") or _infer_lang_from_filename(filename)
437
- changeset.append({"type": "UPDATE_FILE", "path": filename, "content": proposed_content, "lang": lang})
438
- print(f"Staged UPDATE_FILE action for {filename}")
439
- elif is_current_placeholder and is_proposed_placeholder:
440
- print(f"Skipping staging update for {filename}: both current and proposed content are placeholders.")
441
- elif not is_current_placeholder and is_proposed_placeholder:
442
- print(f"Warning: AI proposed placeholder content for existing file {filename}. Staging ignored.")
443
 
 
 
444
 
445
  else:
446
- # File does not exist, stage as CREATE
447
- # Only stage creation if AI provides actual content, not just a placeholder
448
  proposed_content = file_info["content"]
449
  if not (proposed_content.startswith("[Binary file") or proposed_content.startswith("[Error loading content:") or proposed_content.startswith("[Binary or Skipped file]")):
450
- lang = _infer_lang_from_filename(filename) # Infer language for a new file
451
  changeset.append({"type": "CREATE_FILE", "path": filename, "content": proposed_content, "lang": lang})
452
- print(f"Staged CREATE_FILE action for {filename}")
453
  else:
454
  print(f"Skipping staging create for {filename}: Proposed content is a placeholder.")
455
 
456
-
457
- # 4. Format the changeset into a human-readable Markdown string
458
  if not changeset:
459
  md_summary = ["### πŸ“‹ Proposed Changes Plan", "\nThe AI did not propose any specific changes to files or the space.\n"]
460
  else:
461
  md_summary = ["### πŸ“‹ Proposed Changes Plan\n"]
462
  md_summary.append("The AI has proposed the following changes. Please review and confirm.")
463
 
464
- # Separate action types for clearer display
465
  file_changes = [c for c in changeset if c['type'] in ['CREATE_FILE', 'UPDATE_FILE', 'DELETE_FILE']]
466
  space_actions = [c for c in changeset if c['type'] not in ['CREATE_FILE', 'UPDATE_FILE', 'DELETE_FILE']]
467
 
@@ -474,7 +335,7 @@ def generate_and_stage_changes(ai_response_content, current_files_state, hf_owne
474
  md_summary.append(f"- **πŸ”’ Set Privacy:** Set `{change.get('repo_id', '...')}` to `private={change.get('private', False)}`")
475
  elif change["type"] == "DELETE_SPACE":
476
  md_summary.append(f"- **πŸ’₯ DELETE ENTIRE SPACE:** `{change.get('owner', '...')}/{change.get('space_name', '...')}` **(DESTRUCTIVE ACTION)**")
477
- md_summary.append("") # Add newline after actions
478
 
479
  if file_changes:
480
  md_summary.append("**File Changes:**")
@@ -489,35 +350,34 @@ def generate_and_stage_changes(ai_response_content, current_files_state, hf_owne
489
  return changeset, "\n".join(md_summary)
490
 
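For orientation, the changeset returned above is a plain list of dicts; a hypothetical plan (all values invented for illustration) could look like the following, and handle_confirm_changes later hands such a list to apply_staged_changes:

example_changeset = [
    {"type": "CREATE_SPACE", "repo_id": "some-user/demo-space", "sdk": "gradio", "private": False},
    {"type": "UPDATE_FILE", "path": "app.py", "content": "print('hello')", "lang": "python"},
    {"type": "DELETE_FILE", "path": "old_util.py"},
    {"type": "SET_PRIVACY", "repo_id": "some-user/demo-space", "private": True},
]
# status_message = apply_staged_changes(hf_api_key, owner_name, space_name, example_changeset)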
491
 
492
- # --- Gradio Event Handlers ---
493
-
494
  def handle_chat_submit(user_message, chat_history, hf_api_key_input, provider_api_key_input, provider_select, model_select, system_prompt, hf_owner_name, hf_repo_name):
495
  global parsed_code_blocks_state_cache
496
  _chat_msg_in, _chat_hist = "", list(chat_history)
 
497
 
498
- # Hide confirmation UI while AI is thinking
499
  yield (
500
- _chat_msg_in, _chat_hist, "Initializing...",
501
- gr.update(), gr.update(), gr.update(interactive=False), gr.update(value="*No changes proposed.*"), # Clear summary
502
- [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False) # Hide confirm UI
503
  )
504
 
505
  if not user_message.strip():
 
506
  yield (
507
- _chat_msg_in, _chat_hist, "Cannot send an empty message.",
508
  gr.update(), gr.update(), gr.update(), gr.update(),
509
  [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
510
  )
511
  return
512
 
513
  _chat_hist.append((user_message, None))
 
514
  yield (
515
- _chat_msg_in, _chat_hist, f"Sending to {model_select}...",
516
  gr.update(), gr.update(), gr.update(), gr.update(),
517
  [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
518
  )
519
 
520
- # Prepare context for the AI - Export current state to Markdown
521
  current_sys_prompt = system_prompt.strip() or DEFAULT_SYSTEM_PROMPT
522
  space_id_for_context = f"{hf_owner_name}/{hf_repo_name}" if hf_owner_name and hf_repo_name else "your-space"
523
  export_result = _export_selected_logic(None, space_id_for_context, parsed_code_blocks_state_cache)
@@ -527,121 +387,90 @@ def handle_chat_submit(user_message, chat_history, hf_api_key_input, provider_ap
527
  user_message_with_context = user_message.strip() + current_files_context
528
  api_msgs = _convert_gr_history_to_api_messages(current_sys_prompt, _chat_hist[:-1], user_message_with_context)
529
 
 
530
  try:
531
- full_bot_response_content = ""
532
- # Pass the provider API key from the UI to the generation logic
533
  streamer = generate_stream(provider_select, model_select, provider_api_key_input, api_msgs)
534
  for chunk in streamer:
535
  if chunk is None: continue
536
- if isinstance(chunk, str):
537
- # Check for error indicators early
538
- if full_bot_response_content == "" and (chunk.startswith("Error:") or chunk.startswith("API HTTP Error")):
539
- full_bot_response_content = chunk; break
540
- full_bot_response_content += str(chunk)
541
-
542
  _chat_hist[-1] = (user_message, full_bot_response_content)
 
543
  yield (
544
- _chat_msg_in, _chat_hist, f"Streaming from {model_select}...",
545
  gr.update(), gr.update(), gr.update(), gr.update(),
546
  [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
547
  )
548
 
549
- # Handle potential errors from the streamer
550
  if full_bot_response_content.startswith("Error:") or full_bot_response_content.startswith("API HTTP Error"):
551
  _status = full_bot_response_content
552
  yield (_chat_msg_in, _chat_hist, _status, gr.update(), gr.update(), gr.update(), gr.update(), [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False))
553
  return
554
 
555
- # --- Post-streaming: Parse AI output, update cache, stage changes ---
556
-
557
  _status = "Stream complete. Parsing response and staging changes..."
558
  yield (_chat_msg_in, _chat_hist, _status, gr.update(), gr.update(), gr.update(), gr.update(), [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False))
559
 
560
- # 1. Update the state cache based on the *full* AI response
561
- # This reflects the AI's understanding and proposed file content *in the UI*.
562
  parsed_code_blocks_state_cache, proposed_filenames_in_turn = _parse_and_update_state_cache(full_bot_response_content, parsed_code_blocks_state_cache)
563
-
564
- # Regenerate UI previews based on the updated cache
565
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(hf_owner_name, hf_repo_name)
566
 
567
- # 2. Generate the changeset and summary based on the AI output and current cache state
568
- # Pass the updated cache to generate_and_stage_changes for comparison
569
  staged_changeset, summary_md = generate_and_stage_changes(full_bot_response_content, parsed_code_blocks_state_cache, hf_owner_name, hf_repo_name)
570
 
571
-
572
  if not staged_changeset:
573
- _status = summary_md # Will be "No changes proposed" message
574
  yield (
575
  _chat_msg_in, _chat_hist, _status,
576
  _detected, _formatted, _download,
577
- [], # Clear changeset state
578
- gr.update(value=summary_md), # Display summary
579
- gr.update(visible=False), gr.update(visible=False), gr.update(visible=False) # Hide confirm UI
580
  )
581
  else:
582
  _status = "Change plan generated. Please review and confirm below."
583
  yield (
584
  _chat_msg_in, _chat_hist, _status,
585
  _detected, _formatted, _download,
586
- staged_changeset, # Send changeset to state
587
- gr.update(value=summary_md), # Display summary
588
- gr.update(visible=True), # Show the accordion
589
- gr.update(visible=True), # Show confirm button
590
- gr.update(visible=True) # Show cancel button
591
  )
592
 
593
  except Exception as e:
594
- error_msg = f"An unexpected error occurred: {e}"
595
  print(f"Error in handle_chat_submit: {e}")
596
  import traceback
597
  traceback.print_exc()
598
  if _chat_hist:
599
- # Ensure the last message is not None before updating
600
  if _chat_hist[-1] and _chat_hist[-1][0] == user_message:
601
  _chat_hist[-1] = (user_message, (full_bot_response_content + "\n\n" if full_bot_response_content and full_bot_response_content != user_message else "") + error_msg)
602
- else: # Should not happen, but as fallback
603
  _chat_hist.append((user_message, error_msg))
604
 
605
-
606
- # Regenerate UI previews based on the updated cache (even if there was an error after streaming)
607
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(hf_owner_name, hf_repo_name)
608
 
609
  yield (
610
  _chat_msg_in, _chat_hist, error_msg,
611
  _detected, _formatted, _download,
612
- [], # Clear changeset state on error
613
- gr.update(value="*Error occurred, changes plan cleared.*"), # Clear summary display
614
- gr.update(visible=False), gr.update(visible=False), gr.update(visible=False) # Hide confirm UI
615
  )
616
 
617
 
618
  def handle_confirm_changes(hf_api_key, owner_name, space_name, changeset):
619
- """Applies the staged changes from the changeset."""
620
  global parsed_code_blocks_state_cache
621
 
622
- # Hide the confirmation UI immediately
623
- yield "Applying changes...", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
624
- # Keep the summary visible potentially with a loading indicator? Or clear? Let's clear it.
625
- yield "Applying changes...", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(value="*Applying changes...*")
626
 
627
  if not changeset:
628
- # This shouldn't happen if the button is hidden, but as a safeguard
629
  return "No changes to apply.", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(value="No changes were staged.")
630
 
631
- # Call the build_logic function to apply the changes
632
- # The build_logic function will return a status message string
633
  status_message = apply_staged_changes(hf_api_key, owner_name, space_name, changeset)
634
 
635
- # After applying changes, reload the space state to reflect the actual state on the Hub
636
- # This is important because the build_logic might fail partially, or the AI's cache might be outdated.
637
- # Reloading ensures the UI reflects the reality on the Hub.
638
  _status_reload = f"{status_message} | Reloading Space state..."
639
  yield _status_reload, gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(value="*Reloading Space state...*")
640
 
641
- # Need to call handle_load_existing_space or similar logic here
642
- # Let's replicate the core logic from handle_load_existing_space needed to refresh the cache and UI
643
- # Note: This doesn't update the chat history or other parts of handle_load_existing_space,
644
- # just the file-related UI elements.
645
  refreshed_file_list = []
646
  reload_error = None
647
  repo_id_for_reload = f"{owner_name}/{space_name}" if owner_name and space_name else None
@@ -650,68 +479,49 @@ def handle_confirm_changes(hf_api_key, owner_name, space_name, changeset):
650
  sdk, file_list, err_list = get_space_repository_info(hf_api_key, space_name, owner_name)
651
  if err_list:
652
  reload_error = f"Error reloading file list after changes: {err_list}"
653
- parsed_code_blocks_state_cache = [] # Clear cache if list fails
654
  else:
655
  refreshed_file_list = file_list
656
  loaded_files = []
657
  for file_path in refreshed_file_list:
658
  content, err_get = get_space_file_content(hf_api_key, space_name, owner_name, file_path)
659
  lang = _infer_lang_from_filename(file_path)
660
- is_binary = lang == "binary" or err_get # Assume error indicates binary/unreadable
661
- code = f"[Error loading content: {err_get}]" if err_get else content
662
  loaded_files.append({"filename": file_path, "code": code, "language": lang, "is_binary": is_binary, "is_structure_block": False})
663
- parsed_code_blocks_state_cache = loaded_files # Update cache with refreshed state
664
- # Add back the structure block if it was in the cache before reload (it's AI generated, not from Hub)
665
- # This might be wrong - the cache should represent the *actual* state + AI's last proposed file changes.
666
- # Let's keep AI's proposed structure block in the cache until a new one replaces it.
667
- # But reloading from Hub should overwrite the *file content*. The structure block is separate.
668
- # If we reload from Hub, the cache should be *only* Hub files + the last AI structure block.
669
- last_ai_structure_block = next((b for b in parsed_code_blocks_state_cache if b.get("is_structure_block")), None) # Check cache *before* clearing
670
- if last_ai_structure_block:
671
- # Find it in the *new* loaded_files list if it exists there (e.g. README.md could be structure?)
672
- # Or just re-add the AI structure block if it was in the cache previously?
673
- # Let's stick to the simpler model: AI structure block is just for display/context in the markdown tab,
674
- # the actual files are what's loaded/applied. Reloading files replaces the file blocks in cache, structure block is kept or removed based on AI output.
675
- # On reload, we only get actual files from the Hub. The AI structure block is NOT on the hub.
676
- # So, clearing and adding only Hub files is correct.
677
- pass # last_ai_structure_block is not added back.
678
 
679
  else:
680
  reload_error = "Cannot reload Space state: Owner or Space Name missing."
681
 
682
-
683
- # Regenerate UI previews based on the refreshed cache state
684
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(owner_name, space_name)
685
 
686
  final_overall_status = status_message + (f" | Reload Status: {reload_error}" if reload_error else " | Reload Status: Space state refreshed.")
687
 
688
- # Clear the changeset state after application attempt
689
  cleared_changeset = []
690
 
691
- # Return updated UI elements and hide confirmation UI
692
  return (
693
  final_overall_status,
694
  _formatted,
695
  _detected,
696
  _download,
697
- gr.update(visible=False), # Hide accordion
698
- gr.update(visible=False), # Hide confirm button
699
- gr.update(visible=False), # Hide cancel button
700
- cleared_changeset, # Clear changeset state
701
- gr.update(value="*No changes proposed.*") # Clear summary display
702
  )
703
 
704
 
705
  def handle_cancel_changes():
706
- """Clears the staged changeset and hides the confirmation UI."""
707
- global parsed_code_blocks_state_cache # Cancel doesn't affect the cache state
708
  return (
709
  "Changes cancelled.",
710
- [], # Clear changeset state
711
- gr.update(value="*No changes proposed.*"), # Clear summary display
712
- gr.update(visible=False), # Hide accordion
713
- gr.update(visible=False), # Hide confirm button
714
- gr.update(visible=False) # Hide cancel button
715
  )
716
 
717
 
@@ -727,76 +537,67 @@ def handle_load_existing_space(hf_api_key_ui, ui_owner_name, ui_space_name):
727
  _formatted_md_val, _detected_preview_val, _status_val = "*Loading files...*", "*Loading files...*", f"Loading Space: {ui_owner_name}/{ui_space_name}..."
728
  _file_browser_update, _iframe_html_update, _download_btn_update = gr.update(visible=False, choices=[], value=None), gr.update(value=None, visible=False), gr.update(interactive=False, value=None)
729
  _build_status_clear, _edit_status_clear, _runtime_status_clear = "*Build status...*", "*Select a file...*", "*Runtime status...*"
730
- _chat_history_clear = [] # Don't clear chat history on load
731
- _changeset_clear = [] # Clear staged changes on load
732
- _changeset_summary_clear = "*No changes proposed.*" # Clear summary on load
733
- _confirm_ui_hidden = gr.update(visible=False) # Hide confirm UI on load
734
-
735
 
736
- # Initial yield to show loading state
737
  outputs = [
738
  _formatted_md_val, _detected_preview_val, _status_val, _file_browser_update,
739
- gr.update(value=ui_owner_name), gr.update(value=ui_space_name), # Update owner/space fields immediately
740
  _iframe_html_update, _download_btn_update, _build_status_clear,
741
- _edit_status_clear, _runtime_status_clear, _chat_history_clear,
742
- _changeset_clear, _changeset_summary_clear, _confirm_ui_hidden, _confirm_ui_hidden, _confirm_ui_hidden # Hide confirmation UI
743
  ]
744
  yield outputs
745
 
746
  owner_to_use = ui_owner_name
747
  if not owner_to_use:
748
- token, err = build_logic_get_api_token(hf_api_key_ui)
749
- if err or not token:
750
- _status_val = f"Error: {err or 'Cannot determine owner from token.'}"
751
  outputs[2] = _status_val; yield outputs; return
752
  try:
753
  user_info = build_logic_whoami(token=token)
754
  owner_to_use = user_info.get('name')
755
  if not owner_to_use: raise Exception("Could not find user name from token.")
756
- outputs[4] = gr.update(value=owner_to_use) # Update UI owner field
757
  _status_val += f" (Auto-detected owner: {owner_to_use})"
758
  except Exception as e:
759
- _status_val = f"Error auto-detecting owner: {e}"; outputs[2] = _status_val; yield outputs; return
760
 
761
  if not owner_to_use or not ui_space_name:
762
- _status_val = "Error: Owner and Space Name are required."; outputs[2] = _status_val; yield outputs; return
763
 
764
  sdk, file_list, err = get_space_repository_info(hf_api_key_ui, ui_space_name, owner_to_use)
765
 
766
- # Always update owner/space inputs even on error, as user entered them
767
  outputs[4] = gr.update(value=owner_to_use)
768
  outputs[5] = gr.update(value=ui_space_name)
769
 
770
  if err:
771
  _status_val = f"Load Error: {err}"
772
- parsed_code_blocks_state_cache = [] # Clear cache on load error
773
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
774
- outputs[0], outputs[1], outputs[2], outputs[7] = _formatted, _detected, _status_val, _download # Update markdown, preview, status, download
775
- outputs[3] = gr.update(visible=False, choices=[], value=None) # Hide file browser
776
- outputs[6] = gr.update(value=None, visible=False) # Hide iframe
777
  yield outputs; return
778
 
779
- # Success case: Populate cache and UI
780
  loaded_files = []
781
  for file_path in file_list:
782
  content, err_get = get_space_file_content(hf_api_key_ui, ui_space_name, owner_to_use, file_path)
783
  lang = _infer_lang_from_filename(file_path)
784
- is_binary = lang == "binary" or (err_get is not None) # err_get will be a string if error
785
- code = f"[Error loading content: {err_get}]" if err_get else (content or "") # Ensure code is empty string if content is None
786
  loaded_files.append({"filename": file_path, "code": code, "language": lang, "is_binary": is_binary, "is_structure_block": False})
787
 
788
- # When loading, the cache should only contain the actual files from the Hub.
789
- # Any previous AI-generated structure block is discarded.
790
  parsed_code_blocks_state_cache = loaded_files
791
 
792
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
793
  _status_val = f"Successfully loaded {len(file_list)} files from {owner_to_use}/{ui_space_name}. SDK: {sdk or 'unknown'}."
794
- outputs[0], outputs[1], outputs[2], outputs[7] = _formatted, _detected, _status_val, _download # Update markdown, preview, status, download
795
 
796
- # Update file browser dropdown
797
  outputs[3] = gr.update(visible=True, choices=sorted(file_list or []), value=None)
798
 
799
- # Update iframe preview
800
  if owner_to_use and ui_space_name:
801
  sub_owner = re.sub(r'[^a-z0-9\-]+', '-', owner_to_use.lower()).strip('-') or 'owner'
802
  sub_repo = re.sub(r'[^a-z0-9\-]+', '-', ui_space_name.lower()).strip('-') or 'space'
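As an aside, a rough self-contained illustration (owner, space and SDK values are made up) of how these slugs become the Space preview URL used for the iframe:

import re

owner, space, sdk = "Some_User", "My Demo Space", "gradio"
sub_owner = re.sub(r'[^a-z0-9\-]+', '-', owner.lower()).strip('-') or 'owner'
sub_repo = re.sub(r'[^a-z0-9\-]+', '-', space.lower()).strip('-') or 'space'
iframe_url = f"https://{sub_owner}-{sub_repo}{'.static.hf.space' if sdk == 'static' else '.hf.space'}"
print(iframe_url)  # https://some-user-my-demo-space.hf.space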
@@ -805,107 +606,118 @@ def handle_load_existing_space(hf_api_key_ui, ui_owner_name, ui_space_name):
805
  else:
806
  outputs[6] = gr.update(value=None, visible=False)
807
 
808
-
809
  yield outputs
810
 
811
- # This manual build button now uses the formatted_space_output_display content
812
- # It's separate from the AI-driven apply_staged_changes
813
  def handle_build_space_button(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, space_sdk_ui, is_private_ui, formatted_markdown_content):
814
- _build_status, _iframe_html, _file_browser_update = "Starting manual space build process...", gr.update(value=None, visible=False), gr.update(visible=False, choices=[], value=None)
815
- # Also hide confirmation UI and clear state on manual build
 
816
  _changeset_clear = []
817
  _changeset_summary_clear = "*Manual build initiated, changes plan cleared.*"
818
  _confirm_ui_hidden = gr.update(visible=False)
819
 
820
  yield (_build_status, _iframe_html, _file_browser_update, gr.update(value=ui_owner_name_part), gr.update(value=ui_space_name_part),
821
- _changeset_clear, _changeset_summary_clear, _confirm_ui_hidden, _confirm_ui_hidden, _confirm_ui_hidden)
 
 
822
 
823
  if not ui_space_name_part or "/" in ui_space_name_part:
824
  _build_status = f"Build Error: Invalid Space Name '{ui_space_name_part}'."
825
  yield (_build_status, _iframe_html, _file_browser_update, gr.update(), gr.update(),
826
- gr.update(), gr.update(), gr.update(), gr.update(), gr.update()); return
 
827
 
828
- # Use build_logic_create_space directly for manual build
829
- result_message = build_logic_create_space(ui_api_token_from_textbox=hf_api_key_ui, space_name_ui=ui_space_name_part, owner_ui=ui_owner_name_part, sdk_ui=space_sdk_ui, markdown_input=formatted_markdown_content, private=is_private_ui)
830
- _build_status = f"Manual Build Process: {result_message}"
831
 
832
- if "Successfully" in result_message:
833
- global parsed_code_blocks_state_cache
834
-
835
- owner_to_use = ui_owner_name_part # Assume determined correctly by build_logic_create_space
836
- space_to_use = ui_space_name_part
837
 
838
- # Attempt to update UI with the new state after successful build
839
- # This is similar to handle_load_existing_space post-success
840
- sdk_built, file_list, err_list = get_space_repository_info(hf_api_key_ui, space_to_use, owner_to_use)
841
- if err_list:
842
- _build_status += f" | Error reloading file list after build: {err_list}"
843
- parsed_code_blocks_state_cache = [] # Clear cache
844
- _file_browser_update = gr.update(visible=False, choices=[], value=None)
845
- _iframe_html = gr.update(value=None, visible=False)
846
- else:
847
- loaded_files = []
848
- for file_path in file_list:
849
- content, err_get = get_space_file_content(hf_api_key_ui, space_to_use, owner_to_use, file_path)
850
- lang = _infer_lang_from_filename(file_path)
851
- is_binary = lang == "binary" or (err_get is not None)
852
- code = f"[Error loading content: {err_get}]" if err_get else (content or "")
853
- loaded_files.append({"filename": file_path, "code": code, "language": lang, "is_binary": is_binary, "is_structure_block": False})
854
- parsed_code_blocks_state_cache = loaded_files # Update cache
855
 
856
- _file_browser_update = gr.update(visible=True, choices=sorted(file_list or []), value=None)
857
 
 
 
 
 
858
  sub_owner = re.sub(r'[^a-z0-9\-]+', '-', owner_to_use.lower()).strip('-') or 'owner'
859
- sub_repo = re.sub(r'[^a-z0-9\-]+', '-', space_to_use.lower()).strip('-') or 'space'
860
  iframe_url = f"https://{sub_owner}-{sub_repo}{'.static.hf.space' if sdk_built == 'static' else '.hf.space'}"
861
  _iframe_html = gr.update(value=f'<iframe src="{iframe_url}?__theme=light&embed=true" width="100%" height="700px"></iframe>', visible=True)
 
 
862
 
 
863
 
864
- # Need to update formatted/detected markdown displays after manual build as well
865
- _formatted_md, _detected_preview, _download = _generate_ui_outputs_from_cache(ui_owner_name_part, ui_space_name_part)
866
-
867
-
868
- yield (_build_status, _iframe_html, _file_browser_update, gr.update(value=ui_owner_name_part), gr.update(value=ui_space_name_part),
869
  _changeset_clear, _changeset_summary_clear, _confirm_ui_hidden, _confirm_ui_hidden, _confirm_ui_hidden,
870
- _formatted_md, _detected_preview, _download # Include these outputs
871
  )
872
 
873
 
874
  def handle_load_file_for_editing(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, selected_file_path):
875
  if not selected_file_path:
876
- yield gr.update(value=""), "Select a file.", gr.update(value=""), gr.update(language="plaintext")
877
- return
878
 
879
  content, err = get_space_file_content(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, selected_file_path)
880
  if err:
881
- # If load fails, clear editor and show error
882
- yield "", f"Error loading '{selected_file_path}': {err}", "", gr.update(language="plaintext")
883
- return
884
 
885
  lang = _infer_lang_from_filename(selected_file_path)
886
  commit_msg = f"Update {selected_file_path}"
887
- yield content, f"Loaded `{selected_file_path}`", commit_msg, gr.update(language=lang)
888
 
889
  def handle_commit_file_changes(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_edit_path, edited_content, commit_message):
890
  if not file_to_edit_path:
891
- return "Error: No file selected for commit.", gr.update(), gr.update(), gr.update(), gr.update()
892
 
893
  status_msg = update_space_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_edit_path, edited_content, commit_message)
894
- file_list, _ = list_space_files_for_browsing(hf_api_key_ui, ui_space_name_part, ui_owner_name_part) # Refresh file list dropdown
895
  global parsed_code_blocks_state_cache
896
  if "Successfully" in status_msg:
897
- # Update cache on success
898
- # Find the block in the cache and update its content and potentially language
899
  found = False
900
  for block in parsed_code_blocks_state_cache:
901
  if block["filename"] == file_to_edit_path and not block.get("is_structure_block"):
902
  block["code"] = edited_content
903
- block["language"] = _infer_lang_from_filename(file_to_edit_path) # Re-infer language in case extension changed
904
- block["is_binary"] = False # Assume edited content is text
905
  found = True
906
  break
907
  if not found:
908
- # If the file wasn't in cache (e.g., loaded after AI chat), add it
909
  parsed_code_blocks_state_cache.append({
910
  "filename": file_to_edit_path,
911
  "code": edited_content,
@@ -913,40 +725,42 @@ def handle_commit_file_changes(hf_api_key_ui, ui_space_name_part, ui_owner_name_
913
  "is_binary": False,
914
  "is_structure_block": False
915
  })
916
- # Re-sort cache
917
  parsed_code_blocks_state_cache.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
918
 
 
 
919
 
920
- # Regenerate markdown displays from updated cache
921
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(ui_owner_name_part, ui_space_name_part)
922
- return status_msg, gr.update(choices=sorted(file_list or [])), _formatted, _detected, _download
923
 
924
  def handle_delete_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_delete_path):
925
  if not file_to_delete_path:
926
- return "No file selected to delete.", gr.update(), "", "", "plaintext", gr.update(), gr.update(), gr.update()
927
 
928
  status_msg = build_logic_delete_space_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_delete_path)
929
- file_list, _ = list_space_files_for_browsing(hf_api_key_ui, ui_space_name_part, ui_owner_name_part) # Refresh file list dropdown
930
  global parsed_code_blocks_state_cache
 
 
931
  if "Successfully" in status_msg:
932
- # Update cache: remove the deleted file
933
  parsed_code_blocks_state_cache = [b for b in parsed_code_blocks_state_cache if b["filename"] != file_to_delete_path]
934
- # Clear the editor if the deleted file was currently loaded
935
  file_content_editor_update = gr.update(value="")
936
  commit_message_update = gr.update(value="")
937
  editor_lang_update = gr.update(language="plaintext")
938
- else:
939
- # If deletion failed, keep the editor content and status as they were
940
- file_content_editor_update = gr.update()
941
- commit_message_update = gr.update()
942
- editor_lang_update = gr.update()
943
-
944
 
945
- # Regenerate markdown displays from updated cache
946
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(ui_owner_name_part, ui_space_name_part)
947
  return (
948
  status_msg,
949
- gr.update(choices=sorted(file_list or []), value=None), # Clear selected value after delete
950
  file_content_editor_update,
951
  commit_message_update,
952
  editor_lang_update,
@@ -957,28 +771,26 @@ def handle_delete_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, fi
957
 
958
  def handle_refresh_space_status(hf_api_key_ui, ui_owner_name, ui_space_name):
959
  if not ui_owner_name or not ui_space_name:
960
- return "*Owner and Space Name must be provided to get status.*"
961
 
962
  status_details, err = get_space_runtime_status(hf_api_key_ui, ui_space_name, ui_owner_name)
963
- if err: return f"**Error:** {err}"
964
  if not status_details: return "*Could not retrieve status details.*"
965
 
966
  md = f"### Status for {ui_owner_name}/{ui_space_name}\n"
967
- # Display key status details
968
  md += f"- **Stage:** `{status_details.get('stage', 'N/A')}`\n"
969
- md += f"- **Status:** `{status_details.get('status', 'N/A')}`\n" # More detailed status
970
  md += f"- **Hardware:** `{status_details.get('hardware', 'N/A')}`\n"
971
  requested_hw = status_details.get('requested_hardware')
972
  if requested_hw: md += f"- **Requested Hardware:** `{requested_hw}`\n"
973
  error_msg = status_details.get('error_message')
974
- if error_msg: md += f"- **Error:** `{error_msg}`\n"
975
  log_link = status_details.get('full_log_link')
976
  if log_link and log_link != "#": md += f"- [View Full Logs]({log_link})\n"
977
 
978
  return md
979
 
980
 
981
- # --- UI Theming and CSS (Unchanged) ---
982
  custom_theme = gr.themes.Base(primary_hue="teal", secondary_hue="purple", neutral_hue="zinc", text_size="sm", spacing_size="md", radius_size="sm", font=["System UI", "sans-serif"])
983
  custom_css = """
984
  body { background: linear-gradient(to bottom right, #2c3e50, #34495e); color: #ecf0f1; }
@@ -988,28 +800,20 @@ body { background: linear-gradient(to bottom right, #2c3e50, #34495e); color: #e
988
  .gr-button.gr-button-primary { background-color: #1abc9c !important; color: white !important; border-color: #16a085 !important; }
989
  .gr-button.gr-button-secondary { background-color: #9b59b6 !important; color: white !important; border-color: #8e44ad !important; }
990
  .gr-button.gr-button-stop { background-color: #e74c3c !important; color: white !important; border-color: #c0392b !important; }
991
- .gr-markdown { background-color: rgba(44, 62, 80, 0.7) !important; padding: 10px; border-radius: 5px; overflow-x: auto; } /* Added overflow-x */
992
  .gr-markdown h1, .gr-markdown h2, .gr-markdown h3, .gr-markdown h4, .gr-markdown h5, .gr-markdown h6 { color: #ecf0f1 !important; border-bottom-color: rgba(189, 195, 199, 0.3) !important; }
993
  .gr-markdown pre code { background-color: rgba(52, 73, 94, 0.95) !important; border-color: rgba(189, 195, 199, 0.3) !important; }
994
  .gr-chatbot { background-color: rgba(44, 62, 80, 0.7) !important; border-color: rgba(189, 195, 199, 0.2) !important; }
995
  .gr-chatbot .message { background-color: rgba(52, 73, 94, 0.9) !important; color: #ecf0f1 !important; border-color: rgba(189, 195, 199, 0.3) !important; }
996
  .gr-chatbot .message.user { background-color: rgba(46, 204, 113, 0.9) !important; color: black !important; }
997
- /* Custom styles for Proposed Changes Accordion */
998
  .gradio-container .gr-accordion { border-color: rgba(189, 195, 199, 0.3) !important; }
999
  .gradio-container .gr-accordion.closed { background-color: rgba(52, 73, 94, 0.9) !important; }
1000
  .gradio-container .gr-accordion.open { background-color: rgba(44, 62, 80, 0.8) !important; }
1001
 
1002
  """
1003
 
1004
-
1005
- # --- Gradio UI Definition ---
1006
  with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
1007
- # State to hold the parsed change plan from the AI
1008
  changeset_state = gr.State([])
1009
- # State to hold the *current* representation of the Space's files and structure.
1010
- # This is populated on load and updated by AI outputs or manual edits/deletes.
1011
- # It's shared globally across handlers that modify/read the Space state.
1012
- # parsed_code_blocks_state_cache = gr.State([]) # Global variable is simpler for now
1013
 
1014
  gr.Markdown("# πŸ€– AI-Powered Hugging Face Space Commander")
1015
  gr.Markdown("Use an AI assistant to create, modify, build, and manage your Hugging Face Spaces directly from this interface.")
@@ -1022,29 +826,19 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
1022
  with gr.Accordion("βš™οΈ Configuration", open=True):
1023
  hf_api_key_input = gr.Textbox(label="Hugging Face Token", type="password", placeholder="hf_... (uses env var HF_TOKEN if empty)")
1024
  owner_name_input = gr.Textbox(label="HF Owner Name", placeholder="e.g., your-username")
1025
- space_name_input = gr.Textbox(label="HF Space Name", value="") # Default to empty
1026
  load_space_button = gr.Button("🔄 Load Existing Space", variant="secondary")
1027
- gr.Markdown("---") # Separator
1028
- # Manual Space Actions (outside AI flow, but use same backend)
1029
- # set_privacy_button = gr.Button("🔒 Toggle Space Privacy", variant="secondary") # Could be checkbox + button
1030
- # delete_space_button = gr.Button("💥 Delete Entire Space", variant="stop") # Needs confirmation modal
1031
-
1032
 
1033
  with gr.Accordion("πŸ€– AI Model Settings", open=True):
1034
- # --- MODIFIED: Set up default provider and model logic on load ---
1035
  available_providers = get_available_providers()
1036
  default_provider = 'Groq'
1037
- # Fallback if 'Groq' is not an option, or if list is smaller than 3
1038
  if default_provider not in available_providers:
1039
  default_provider = available_providers[0] if available_providers else None
1040
  elif len(available_providers) < 3:
1041
  default_provider = available_providers[0] if available_providers else None
1042
 
1043
-
1044
- # Get initial models and the default model for the selected provider
1045
  initial_models = get_models_for_provider(default_provider) if default_provider else []
1046
  initial_model = get_default_model_for_provider(default_provider) if default_provider else None
1047
- # Fallback for the model as well
1048
  if initial_model not in initial_models:
1049
  initial_model = initial_models[0] if initial_models else None
1050
 
@@ -1060,7 +854,6 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
1060
  value=initial_model,
1061
  allow_custom_value=False
1062
  )
1063
- # --- END MODIFICATION ---
1064
  provider_api_key_input = gr.Textbox(label="Model Provider API Key (Optional)", type="password", placeholder="sk_... (overrides backend settings)")
1065
  system_prompt_input = gr.Textbox(label="System Prompt", lines=10, value=DEFAULT_SYSTEM_PROMPT, elem_id="system-prompt")
1066
 
@@ -1072,7 +865,6 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
1072
  send_chat_button = gr.Button("Send", variant="primary", scale=1)
1073
  status_output = gr.Textbox(label="Last Action Status", interactive=False, value="Ready.")
1074
 
1075
- # Confirmation Accordion - Initially hidden
1076
  with gr.Accordion("πŸ“ Proposed Changes (Pending Confirmation)", open=False, visible=False) as confirm_accordion:
1077
  changeset_display = gr.Markdown("*No changes proposed.*")
1078
  with gr.Row():
@@ -1083,55 +875,42 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
1083
  with gr.TabItem("πŸ“ Generated Markdown & Build"):
1084
  with gr.Row():
1085
  with gr.Column(scale=2):
1086
- # This textbox shows the full markdown representation based on the *current state cache*
1087
  formatted_space_output_display = gr.Textbox(label="Current Space Definition (Generated Markdown)", lines=20, interactive=True, value="*Load or create a space to see its definition.*")
1088
  download_button = gr.DownloadButton(label="Download .md", interactive=False)
1089
  with gr.Column(scale=1):
1090
- gr.Markdown("### Build Controls")
1091
- # Manual build controls
1092
  space_sdk_select = gr.Dropdown(label="Space SDK", choices=["gradio", "streamlit", "docker", "static"], value="gradio", interactive=True)
1093
  space_private_checkbox = gr.Checkbox(label="Make Space Private", value=False, interactive=True)
1094
- # Manual build button now builds from the content in `formatted_space_output_display`
1095
  build_space_button = gr.Button("πŸš€ Build / Update Space from Markdown", variant="primary")
1096
- build_status_display = gr.Textbox(label="Manual Build/Update Status", interactive=False)
1097
- gr.Markdown("---") # Separator
1098
- # Manual status check (uses build_logic)
1099
  refresh_status_button = gr.Button("πŸ”„ Refresh Runtime Status")
1100
  space_runtime_status_display = gr.Markdown("*Runtime status will appear here.*")
1101
 
1102
  with gr.TabItem("πŸ” Files Preview"):
1103
- # This markdown shows the *latest* version of each file from the cache
1104
  detected_files_preview = gr.Markdown(value="*A preview of the latest file versions will appear here.*")
1105
 
1106
  with gr.TabItem("✏️ Live File Editor & Preview"):
1107
  with gr.Row():
1108
  with gr.Column(scale=1):
1109
  gr.Markdown("### Live Editor")
1110
- # Dropdown lists files from the current state cache
1111
  file_browser_dropdown = gr.Dropdown(label="Select File in Space", choices=[], interactive=True)
1112
- # Editor for selected file content
1113
  file_content_editor = gr.Code(label="File Content Editor", language="python", lines=15, interactive=True, value="")
1114
  commit_message_input = gr.Textbox(label="Commit Message", placeholder="e.g., Updated app.py", interactive=True, value="")
1115
  with gr.Row():
1116
- # Manual file actions (use build_logic directly)
1117
  update_file_button = gr.Button("Commit Changes", variant="primary", interactive=True)
1118
  delete_file_button = gr.Button("πŸ—‘οΈ Delete Selected File", variant="stop", interactive=True)
1119
  edit_status_display = gr.Textbox(label="File Edit/Delete Status", interactive=False, value="")
1120
  with gr.Column(scale=1):
1121
  gr.Markdown("### Live Space Preview")
1122
- # Iframe preview of the space (updates on load and successful build)
1123
  space_iframe_display = gr.HTML(value="", visible=True)
1124
 
1125
- # --- Event Listeners ---
1126
-
1127
- # Model dropdown update logic
1128
  provider_select.change(update_models_dropdown, inputs=provider_select, outputs=model_select)
1129
 
1130
- # Chat submission logic
1131
  chat_inputs = [
1132
  chat_message_input, chatbot_display, hf_api_key_input,
1133
  provider_api_key_input, provider_select, model_select, system_prompt_input,
1134
- owner_name_input, space_name_input # Need current space info for context/actions
1135
  ]
1136
  chat_outputs = [
1137
  chat_message_input, chatbot_display, status_output,
@@ -1141,11 +920,10 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
1141
  send_chat_button.click(handle_chat_submit, inputs=chat_inputs, outputs=chat_outputs)
1142
  chat_message_input.submit(handle_chat_submit, inputs=chat_inputs, outputs=chat_outputs)
1143
 
1144
- # Confirmation Button Listeners for AI-proposed changes
1145
  confirm_inputs = [hf_api_key_input, owner_name_input, space_name_input, changeset_state]
1146
  confirm_outputs = [
1147
  status_output, formatted_space_output_display, detected_files_preview, download_button,
1148
- confirm_accordion, confirm_button, cancel_button, changeset_state, changeset_display # Also clear summary display
1149
  ]
1150
  confirm_button.click(handle_confirm_changes, inputs=confirm_inputs, outputs=confirm_outputs)
1151
 
@@ -1155,14 +933,12 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
1155
  ]
1156
  cancel_button.click(handle_cancel_changes, inputs=None, outputs=cancel_outputs)
1157
 
1158
- # Load Existing Space Button logic
1159
  load_space_outputs = [
1160
  formatted_space_output_display, detected_files_preview, status_output,
1161
- file_browser_dropdown, owner_name_input, space_name_input, # Update these inputs as well
1162
  space_iframe_display, download_button, build_status_display,
1163
  edit_status_display, space_runtime_status_display,
1164
- chatbot_display, # Keep chat history
1165
- changeset_state, changeset_display, confirm_accordion, confirm_button, cancel_button # Clear and hide confirm UI
1166
  ]
1167
  load_space_button.click(
1168
  fn=handle_load_existing_space,
@@ -1170,37 +946,31 @@ with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
1170
  outputs=load_space_outputs
1171
  )
1172
 
1173
- # Manual Build Button logic
1174
  build_outputs = [
1175
  build_status_display, space_iframe_display, file_browser_dropdown,
1176
- owner_name_input, space_name_input, # Update inputs based on build result
1177
- changeset_state, changeset_display, confirm_accordion, confirm_button, cancel_button, # Clear and hide confirm UI
1178
- formatted_space_output_display, detected_files_preview, download_button # Update markdown displays
1179
  ]
1180
  build_inputs = [
1181
  hf_api_key_input, space_name_input, owner_name_input, space_sdk_select,
1182
- space_private_checkbox, formatted_space_output_display # Use content from this textbox
1183
  ]
1184
  build_space_button.click(fn=handle_build_space_button, inputs=build_inputs, outputs=build_outputs)
1185
 
1186
-
1187
- # Manual File Editor Load logic
1188
- file_edit_load_outputs = [file_content_editor, edit_status_display, commit_message_input, file_content_editor] # last one updates language
1189
  file_browser_dropdown.change(fn=handle_load_file_for_editing, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown], outputs=file_edit_load_outputs)
1190
 
1191
- # Manual File Commit logic
1192
  commit_file_outputs = [edit_status_display, file_browser_dropdown, formatted_space_output_display, detected_files_preview, download_button]
1193
  update_file_button.click(fn=handle_commit_file_changes, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown, file_content_editor, commit_message_input], outputs=commit_file_outputs)
1194
 
1195
- # Manual File Delete logic
1196
  delete_file_outputs = [
1197
  edit_status_display, file_browser_dropdown,
1198
- file_content_editor, commit_message_input, file_content_editor, # Clear editor fields
1199
- formatted_space_output_display, detected_files_preview, download_button # Update markdown displays
1200
  ]
1201
  delete_file_button.click(fn=handle_delete_file, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown], outputs=delete_file_outputs)
1202
 
1203
- # Refresh Runtime Status logic
1204
  refresh_status_button.click(fn=handle_refresh_space_status, inputs=[hf_api_key_input, owner_name_input, space_name_input], outputs=[space_runtime_status_display])
1205
 
1206
  if __name__ == "__main__":
 
1
+ ### File: app.py
2
+ ```python
3
  import gradio as gr
4
  import re
5
  import json
 
8
  import shlex
9
  from huggingface_hub import HfApi
10
 
11
+ from build_logic import (
12
+ _get_api_token as build_logic_get_api_token,
13
+ whoami as build_logic_whoami,
14
+ list_space_files_for_browsing,
15
+ get_space_repository_info,
16
+ get_space_file_content,
17
+ update_space_file,
18
+ parse_markdown as build_logic_parse_markdown,
19
+ delete_space_file as build_logic_delete_space_file,
20
+ get_space_runtime_status,
21
+ apply_staged_changes,
22
+ )
23
+
24
+ from model_logic import (
25
+ get_available_providers,
26
+ get_models_for_provider,
27
+ get_default_model_for_provider,
28
+ generate_stream
29
+ )
30
 
 
 
31
  backtick = chr(96)
32
  bbb = f'{backtick}{backtick}{backtick}'
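 # `bbb` is a literal triple-backtick string; building it from chr(96) keeps raw
 # fences out of this source. It is used below when assembling the markdown
 # representation of a Space (see _export_selected_logic).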
33
 
 
 
34
  parsed_code_blocks_state_cache = []
35
  BOT_ROLE_NAME = "assistant"
36
 
 
83
  If no code or actions are requested, respond conversationally and help the user understand the Space Commander's capabilities.
84
  """
85
 
 
 
 
 
86
  def escape_html_for_markdown(text):
87
  if not isinstance(text, str): return ""
88
+ return text.replace("&", "&").replace("<", "<").replace(">", ">")
89
 
90
  def _infer_lang_from_filename(filename):
91
  if not filename: return "plaintext"
 
114
 
115
  def _clean_filename(filename_line_content):
116
  text = filename_line_content.strip()
 
117
  text = re.sub(r'[`\*_#]+', '', text).strip()
 
118
  text = re.split(r'\s*\(', text, 1)[0].strip()
 
119
  text = text.strip('\'":;,')
 
120
  text = text.lstrip('/')
121
  return text
122
 
123
  def _parse_and_update_state_cache(latest_bot_message_content, current_files_state):
 
 
 
 
 
 
124
  current_files_dict = {f["filename"]: f.copy() for f in current_files_state if not f.get("is_structure_block")}
125
  structure_block_state = next((b for b in current_files_state if b.get("is_structure_block")), None)
126
 
 
129
  file_pattern = re.compile(r"### File:\s*(?P<filename_line>[^\n]+)\n(?:```(?P<lang>[\w\.\-\+]*)\n(?P<code>[\s\S]*?)\n```|(?P<binary_msg>\[Binary file(?: - [^\]]+)?\]))", re.MULTILINE)
130
  structure_pattern = re.compile(r"## File Structure\n```(?:(?P<struct_lang>[\w.-]*)\n)?(?P<structure_code>[\s\S]*?)\n```", re.MULTILINE)
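 # For reference, these patterns are intended to match AI output shaped roughly like
 # (names and content here are illustrative only):
 #   ### File: app.py
 #   ```python
 #   print("hello")
 #   ```
 # or, for non-text files, a "[Binary file - ...]" placeholder line, plus an optional
 #   ## File Structure
 # block wrapped in a plain fence.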
131
 
 
132
  structure_match = structure_pattern.search(content)
133
  if structure_match:
134
  structure_block_state = {"filename": "File Structure (from AI)", "language": structure_match.group("struct_lang") or "plaintext", "code": structure_match.group("structure_code").strip(), "is_binary": False, "is_structure_block": True}
135
 
136
  current_message_proposed_filenames = []
 
137
  for match in file_pattern.finditer(content):
138
  filename = _clean_filename(match.group("filename_line"))
139
+ if not filename: continue
 
 
140
 
141
  lang, code_block, binary_msg = match.group("lang"), match.group("code"), match.group("binary_msg")
142
 
 
145
  if code_block is not None:
146
  item_data["code"] = code_block.strip()
147
  item_data["language"] = (lang.strip().lower() if lang else _infer_lang_from_filename(filename))
148
+ item_data["is_binary"] = False
149
  elif binary_msg is not None:
150
  item_data["code"] = binary_msg.strip()
151
  item_data["language"] = "binary"
152
  item_data["is_binary"] = True
153
+ else: continue
 
 
 
154
 
 
155
  current_files_dict[filename] = item_data
156
  current_message_proposed_filenames.append(filename)
157
 
 
 
158
  updated_parsed_blocks = list(current_files_dict.values())
159
  if structure_block_state:
160
+ updated_parsed_blocks.insert(0, structure_block_state)
161
 
 
162
  updated_parsed_blocks.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
163
 
164
  return updated_parsed_blocks, current_message_proposed_filenames
165
 
 
166
  def _export_selected_logic(selected_filenames, space_line_name_for_md, parsed_blocks_for_export):
 
167
  results = {"output_str": "", "error_message": None, "download_filepath": None}
 
168
  file_blocks_for_export = [b for b in parsed_blocks_for_export if not b.get("is_structure_block")]
 
 
169
  all_filenames_in_state = sorted(list(set(b["filename"] for b in file_blocks_for_export)))
170
 
 
171
  output_lines = [f"# Space: {space_line_name_for_md}"]
172
 
 
173
  structure_block = next((b for b in parsed_blocks_for_export if b.get("is_structure_block")), None)
174
  if structure_block:
175
  output_lines.extend(["## File Structure", bbb, structure_block["code"].strip(), bbb, ""])
176
  else:
 
177
  output_lines.extend(["## File Structure", bbb, "πŸ“ Root"])
178
  if all_filenames_in_state:
179
+ for fname in all_filenames_in_state: output_lines.append(f" πŸ“„ {fname}")
180
  output_lines.extend([bbb, ""])
181
 
182
  output_lines.append("Below are the contents of all files in the space:\n")
183
 
 
 
184
  blocks_to_export_content = sorted([b for b in file_blocks_for_export if not selected_filenames or b["filename"] in selected_filenames], key=lambda b: b["filename"])
185
 
186
  exported_content_count = 0
 
188
  output_lines.append(f"### File: {block['filename']}")
189
  content = block.get('code', '')
190
  if block.get('is_binary') or content.startswith(("[Binary file", "[Error loading content:", "[Binary or Skipped file]")):
 
191
  output_lines.append(content)
192
  else:
193
+ lang = block.get('language', 'plaintext') or 'plaintext'
 
194
  output_lines.extend([f"{bbb}{lang}", content, bbb])
195
+ output_lines.append("")
196
  exported_content_count += 1
197
 
198
  if not exported_content_count:
199
+ if selected_filenames: output_lines.append("*No selected files have editable content in the state.*")
200
+ elif not all_filenames_in_state: output_lines.append("*No files in state to list structure or export.*")
201
+
202
 
203
  final_output_str = "\n".join(output_lines)
204
  results["output_str"] = final_output_str
205
  try:
 
206
  with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".md", encoding='utf-8') as tmpfile:
207
+ tmpfile.write(final_output_str); results["download_filepath"] = tmpfile.name
 
208
  except Exception as e:
209
  print(f"Error creating temp file for download: {e}")
210
  results["error_message"] = "Could not prepare file for download."
211
+ results["download_filepath"] = None
212
 
213
  return results
214
 
 
217
  for user_msg, bot_msg in gr_history:
218
  if user_msg: messages.append({"role": "user", "content": user_msg})
219
  if bot_msg and isinstance(bot_msg, str): messages.append({"role": BOT_ROLE_NAME, "content": bot_msg})
 
220
  if current_user_message: messages.append({"role": "user", "content": current_user_message})
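 # The resulting list follows the usual chat-completion message shape, e.g. (contents
 # elided, and assuming the system prompt is prepended as the first message, as the
 # call sites suggest):
 #   [{"role": "system", ...}, {"role": "user", ...}, {"role": "assistant", ...}, {"role": "user", ...}]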
221
  return messages
222
 
223
  def _generate_ui_outputs_from_cache(owner, space_name):
 
224
  global parsed_code_blocks_state_cache
 
225
  preview_md_val = "*No files in cache to display.*"
 
226
  space_line_name = f"{owner}/{space_name}" if owner and space_name else (owner or space_name or "your-space")
227
  export_result = _export_selected_logic(None, space_line_name, parsed_code_blocks_state_cache)
228
  formatted_md_val = export_result["output_str"]
229
  download_file = export_result["download_filepath"]
230
+ formatted_md_val = formatted_md_val or "*Load or define a Space to see its Markdown structure.*"
231
 
232
  if parsed_code_blocks_state_cache:
233
  preview_md_lines = ["## Detected/Updated Files & Content (Latest Versions):"]
 
245
 
246
  content = block.get('code', '')
247
  if block.get('is_binary') or content.startswith(("[Binary file", "[Error loading content:", "[Binary or Skipped file]")):
248
+ preview_md_lines.append(f"\n`{escape_html_for_markdown(content.strip())}`\n")
249
  else:
 
250
  lang = block.get('language', 'plaintext') or 'plaintext'
251
+ preview_md_lines.append(f"\n{bbb}{lang}\n{content.strip()}\n{bbb}\n")
252
  preview_md_val = "\n".join(preview_md_lines)
253
 
 
254
  return formatted_md_val, preview_md_val, gr.update(value=download_file, interactive=download_file is not None)
255
 
 
 
256
  def generate_and_stage_changes(ai_response_content, current_files_state, hf_owner_name, hf_repo_name):
 
 
 
 
257
  changeset = []
258
  current_files_dict = {f["filename"]: f for f in current_files_state if not f.get("is_structure_block")}
 
 
 
259
  ai_parsed_md = build_logic_parse_markdown(ai_response_content)
260
+ ai_proposed_files_list = ai_parsed_md.get("files", [])
 
 
261
  ai_proposed_files_dict = {f["path"]: f for f in ai_proposed_files_list}
262
 
 
 
263
  action_pattern = re.compile(r"### HF_ACTION:\s*(?P<command_line>[^\n]+)", re.MULTILINE)
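 # Example action lines this regex is expected to pick up from the AI response
 # (repo ids and file paths below are placeholders):
 #   ### HF_ACTION: CREATE_SPACE some-owner/new-space --sdk gradio --private true
 #   ### HF_ACTION: DELETE_FILE old_app.py
 #   ### HF_ACTION: SET_PRIVATE true
 #   ### HF_ACTION: DELETE_SPACE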
264
  for match in action_pattern.finditer(ai_response_content):
265
+ try:
266
+ cmd_parts = shlex.split(match.group("command_line").strip())
267
+ if not cmd_parts: continue
268
+ command, args = cmd_parts[0].upper(), cmd_parts[1:]
269
+
270
+ if command == "CREATE_SPACE" and args:
271
+ repo_id = args[0]
272
+ sdk = "gradio"
273
+ private = False
274
+ if '--sdk' in args:
275
+ try: sdk = args[args.index('--sdk') + 1]
276
+ except IndexError: print("Warning: CREATE_SPACE --sdk requires an argument.")
277
+ if '--private' in args:
278
+ try: private_str = args[args.index('--private') + 1].lower()
279
+ except IndexError: print("Warning: CREATE_SPACE --private requires an argument.")
280
+ else: private = private_str == 'true'
281
+ changeset.append({"type": "CREATE_SPACE", "repo_id": repo_id, "sdk": sdk, "private": private})
282
+
283
+ elif command == "DELETE_FILE" and args:
284
+ file_path = args[0]
285
+ changeset.append({"type": "DELETE_FILE", "path": file_path})
286
+
287
+ elif command == "SET_PRIVATE" and args:
288
+ private = args[0].lower() == 'true'
289
+ changeset.append({"type": "SET_PRIVACY", "private": private, "repo_id": f"{hf_owner_name}/{hf_repo_name}"})
290
+
291
+ elif command == "DELETE_SPACE":
292
+ changeset.append({"type": "DELETE_SPACE", "owner": hf_owner_name, "space_name": hf_repo_name})
293
+
294
+ except Exception as e:
295
+ print(f"Error parsing HF_ACTION line '{match.group('command_line').strip()}': {e}")
296
+
297
+
 
 
298
  for file_info in ai_proposed_files_list:
299
  filename = file_info["path"]
300
  proposed_content = file_info["content"]
301
 
302
  if filename in current_files_dict:
 
303
  current_content = current_files_dict[filename]["code"]
304
+ is_proposed_placeholder = proposed_content.startswith(("[Binary file", "[Error loading content:", "[Binary or Skipped file]"))
 
 
305
 
306
+ if not is_proposed_placeholder and proposed_content != current_content:
307
+ lang = current_files_dict[filename].get("language") or _infer_lang_from_filename(filename)
308
+ changeset.append({"type": "UPDATE_FILE", "path": filename, "content": proposed_content, "lang": lang})
309
+ elif is_proposed_placeholder:
310
+ print(f"Skipping staging update for {filename}: Proposed content is a placeholder.")
311
 
312
  else:
 
 
313
  proposed_content = file_info["content"]
314
  if not (proposed_content.startswith("[Binary file") or proposed_content.startswith("[Error loading content:") or proposed_content.startswith("[Binary or Skipped file]")):
315
+ lang = _infer_lang_from_filename(filename)
316
  changeset.append({"type": "CREATE_FILE", "path": filename, "content": proposed_content, "lang": lang})
 
317
  else:
318
  print(f"Skipping staging create for {filename}: Proposed content is a placeholder.")
319
 
 
 
320
  if not changeset:
321
  md_summary = ["### πŸ“‹ Proposed Changes Plan", "\nThe AI did not propose any specific changes to files or the space.\n"]
322
  else:
323
  md_summary = ["### πŸ“‹ Proposed Changes Plan\n"]
324
  md_summary.append("The AI has proposed the following changes. Please review and confirm.")
325
 
 
326
  file_changes = [c for c in changeset if c['type'] in ['CREATE_FILE', 'UPDATE_FILE', 'DELETE_FILE']]
327
  space_actions = [c for c in changeset if c['type'] not in ['CREATE_FILE', 'UPDATE_FILE', 'DELETE_FILE']]
328
 
 
335
  md_summary.append(f"- **πŸ”’ Set Privacy:** Set `{change.get('repo_id', '...')}` to `private={change.get('private', False)}`")
336
  elif change["type"] == "DELETE_SPACE":
337
  md_summary.append(f"- **πŸ’₯ DELETE ENTIRE SPACE:** `{change.get('owner', '...')}/{change.get('space_name', '...')}` **(DESTRUCTIVE ACTION)**")
338
+ md_summary.append("")
339
 
340
  if file_changes:
341
  md_summary.append("**File Changes:**")
 
350
  return changeset, "\n".join(md_summary)
351
 
352
 
 
 
353
  def handle_chat_submit(user_message, chat_history, hf_api_key_input, provider_api_key_input, provider_select, model_select, system_prompt, hf_owner_name, hf_repo_name):
354
  global parsed_code_blocks_state_cache
355
  _chat_msg_in, _chat_hist = "", list(chat_history)
356
+ _status = "Initializing..."
357
 
 
358
  yield (
359
+ _chat_msg_in, _chat_hist, _status,
360
+ gr.update(), gr.update(), gr.update(interactive=False), [],
361
+ gr.update(value="*No changes proposed.*"), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
362
  )
363
 
364
  if not user_message.strip():
365
+ _status = "Cannot send an empty message."
366
  yield (
367
+ _chat_msg_in, _chat_hist, _status,
368
  gr.update(), gr.update(), gr.update(), gr.update(),
369
  [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
370
  )
371
  return
372
 
373
  _chat_hist.append((user_message, None))
374
+ _status = f"Sending to {model_select}..."
375
  yield (
376
+ _chat_msg_in, _chat_hist, _status,
377
  gr.update(), gr.update(), gr.update(), gr.update(),
378
  [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
379
  )
380
 
 
381
  current_sys_prompt = system_prompt.strip() or DEFAULT_SYSTEM_PROMPT
382
  space_id_for_context = f"{hf_owner_name}/{hf_repo_name}" if hf_owner_name and hf_repo_name else "your-space"
383
  export_result = _export_selected_logic(None, space_id_for_context, parsed_code_blocks_state_cache)
 
387
  user_message_with_context = user_message.strip() + current_files_context
388
  api_msgs = _convert_gr_history_to_api_messages(current_sys_prompt, _chat_hist[:-1], user_message_with_context)
389
 
390
+ full_bot_response_content = ""
391
  try:
 
 
392
  streamer = generate_stream(provider_select, model_select, provider_api_key_input, api_msgs)
393
  for chunk in streamer:
394
  if chunk is None: continue
395
+ full_bot_response_content += str(chunk)
 
 
 
 
 
396
  _chat_hist[-1] = (user_message, full_bot_response_content)
397
+ _status = f"Streaming from {model_select}..."
398
  yield (
399
+ _chat_msg_in, _chat_hist, _status,
400
  gr.update(), gr.update(), gr.update(), gr.update(),
401
  [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
402
  )
403
 
 
404
  if full_bot_response_content.startswith("Error:") or full_bot_response_content.startswith("API HTTP Error"):
405
  _status = full_bot_response_content
406
  yield (_chat_msg_in, _chat_hist, _status, gr.update(), gr.update(), gr.update(), gr.update(), [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False))
407
  return
408
 
 
 
409
  _status = "Stream complete. Parsing response and staging changes..."
410
  yield (_chat_msg_in, _chat_hist, _status, gr.update(), gr.update(), gr.update(), gr.update(), [], gr.update(visible=False), gr.update(visible=False), gr.update(visible=False))
411
 
 
 
412
  parsed_code_blocks_state_cache, proposed_filenames_in_turn = _parse_and_update_state_cache(full_bot_response_content, parsed_code_blocks_state_cache)
 
 
413
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(hf_owner_name, hf_repo_name)
414
 
 
 
415
  staged_changeset, summary_md = generate_and_stage_changes(full_bot_response_content, parsed_code_blocks_state_cache, hf_owner_name, hf_repo_name)
416
 
 
417
  if not staged_changeset:
418
+ _status = summary_md
419
  yield (
420
  _chat_msg_in, _chat_hist, _status,
421
  _detected, _formatted, _download,
422
+ [],
423
+ gr.update(value=summary_md),
424
+ gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
425
  )
426
  else:
427
  _status = "Change plan generated. Please review and confirm below."
428
  yield (
429
  _chat_msg_in, _chat_hist, _status,
430
  _detected, _formatted, _download,
431
+ staged_changeset,
432
+ gr.update(value=summary_md),
433
+ gr.update(visible=True),
434
+ gr.update(visible=True),
435
+ gr.update(visible=True)
436
  )
437
 
438
  except Exception as e:
439
+ error_msg = f"An unexpected error occurred during chat submission: {e}"
440
  print(f"Error in handle_chat_submit: {e}")
441
  import traceback
442
  traceback.print_exc()
443
  if _chat_hist:
 
444
  if _chat_hist[-1] and _chat_hist[-1][0] == user_message:
445
  _chat_hist[-1] = (user_message, (full_bot_response_content + "\n\n" if full_bot_response_content and full_bot_response_content != user_message else "") + error_msg)
446
+ else:
447
  _chat_hist.append((user_message, error_msg))
448
 
 
 
449
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(hf_owner_name, hf_repo_name)
450
 
451
  yield (
452
  _chat_msg_in, _chat_hist, error_msg,
453
  _detected, _formatted, _download,
454
+ [],
455
+ gr.update(value="*Error occurred, changes plan cleared.*"),
456
+ gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)
457
  )
458
 
459
 
460
  def handle_confirm_changes(hf_api_key, owner_name, space_name, changeset):
 
461
  global parsed_code_blocks_state_cache
462
 
463
+ _status = "Applying changes..."
464
+ yield _status, gr.update(), gr.update(), gr.update(), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(), gr.update(value="*Applying changes...*")
 
 
465
 
466
  if not changeset:
 
467
  return "No changes to apply.", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(value="No changes were staged.")
468
 
 
 
469
  status_message = apply_staged_changes(hf_api_key, owner_name, space_name, changeset)
470
 
 
 
 
471
  _status_reload = f"{status_message} | Reloading Space state..."
472
 yield _status_reload, gr.update(), gr.update(), gr.update(), gr.update(visible=False), gr.update(visible=False), gr.update(visible=False), gr.update(), gr.update(value="*Reloading Space state...*")
473
 
 
 
 
 
474
  refreshed_file_list = []
475
  reload_error = None
476
  repo_id_for_reload = f"{owner_name}/{space_name}" if owner_name and space_name else None
 
479
  sdk, file_list, err_list = get_space_repository_info(hf_api_key, space_name, owner_name)
480
  if err_list:
481
  reload_error = f"Error reloading file list after changes: {err_list}"
482
+ parsed_code_blocks_state_cache = []
483
  else:
484
  refreshed_file_list = file_list
485
  loaded_files = []
486
  for file_path in refreshed_file_list:
487
  content, err_get = get_space_file_content(hf_api_key, space_name, owner_name, file_path)
488
  lang = _infer_lang_from_filename(file_path)
489
+ is_binary = lang == "binary" or (err_get is not None)
490
+ code = f"[Error loading content: {err_get}]" if err_get else (content or "")
491
  loaded_files.append({"filename": file_path, "code": code, "language": lang, "is_binary": is_binary, "is_structure_block": False})
492
+ parsed_code_blocks_state_cache = loaded_files
 
 
493
 
494
  else:
495
  reload_error = "Cannot reload Space state: Owner or Space Name missing."
496
 
 
 
497
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(owner_name, space_name)
498
 
499
  final_overall_status = status_message + (f" | Reload Status: {reload_error}" if reload_error else " | Reload Status: Space state refreshed.")
500
 
 
501
  cleared_changeset = []
502
 
 
503
 yield (
504
  final_overall_status,
505
  _formatted,
506
  _detected,
507
  _download,
508
+ gr.update(visible=False),
509
+ gr.update(visible=False),
510
+ gr.update(visible=False),
511
+ cleared_changeset,
512
+ gr.update(value="*No changes proposed.*")
513
  )
514
 
515
 
516
  def handle_cancel_changes():
517
+ global parsed_code_blocks_state_cache
 
518
  return (
519
  "Changes cancelled.",
520
+ [],
521
+ gr.update(value="*No changes proposed.*"),
522
+ gr.update(visible=False),
523
+ gr.update(visible=False),
524
+ gr.update(visible=False)
525
  )
526
 
527
 
 
537
  _formatted_md_val, _detected_preview_val, _status_val = "*Loading files...*", "*Loading files...*", f"Loading Space: {ui_owner_name}/{ui_space_name}..."
538
  _file_browser_update, _iframe_html_update, _download_btn_update = gr.update(visible=False, choices=[], value=None), gr.update(value=None, visible=False), gr.update(interactive=False, value=None)
539
  _build_status_clear, _edit_status_clear, _runtime_status_clear = "*Build status...*", "*Select a file...*", "*Runtime status...*"
540
+ _changeset_clear = []
541
+ _changeset_summary_clear = "*No changes proposed.*"
542
+ _confirm_ui_hidden = gr.update(visible=False)
 
 
543
 
 
544
  outputs = [
545
  _formatted_md_val, _detected_preview_val, _status_val, _file_browser_update,
546
+ gr.update(value=ui_owner_name), gr.update(value=ui_space_name),
547
  _iframe_html_update, _download_btn_update, _build_status_clear,
548
+ _edit_status_clear, _runtime_status_clear,
549
+ _changeset_clear, _changeset_summary_clear, _confirm_ui_hidden, _confirm_ui_hidden, _confirm_ui_hidden
550
  ]
551
  yield outputs
552
 
553
  owner_to_use = ui_owner_name
554
  if not owner_to_use:
555
+ token, token_err = build_logic_get_api_token(hf_api_key_ui)
556
+ if token_err:
557
+ _status_val = f"Load Error: {token_err}"
558
  outputs[2] = _status_val; yield outputs; return
559
  try:
560
  user_info = build_logic_whoami(token=token)
561
  owner_to_use = user_info.get('name')
562
  if not owner_to_use: raise Exception("Could not find user name from token.")
563
+ outputs[4] = gr.update(value=owner_to_use)
564
  _status_val += f" (Auto-detected owner: {owner_to_use})"
565
  except Exception as e:
566
+ _status_val = f"Load Error: Error auto-detecting owner: {e}"; outputs[2] = _status_val; yield outputs; return
567
 
568
  if not owner_to_use or not ui_space_name:
569
+ _status_val = "Load Error: Owner and Space Name are required."; outputs[2] = _status_val; yield outputs; return
570
 
571
  sdk, file_list, err = get_space_repository_info(hf_api_key_ui, ui_space_name, owner_to_use)
572
 
 
573
  outputs[4] = gr.update(value=owner_to_use)
574
  outputs[5] = gr.update(value=ui_space_name)
575
 
576
  if err:
577
  _status_val = f"Load Error: {err}"
578
+ parsed_code_blocks_state_cache = []
579
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
580
+ outputs[0], outputs[1], outputs[2], outputs[7] = _formatted, _detected, _status_val, _download
581
+ outputs[3] = gr.update(visible=False, choices=[], value=None)
582
+ outputs[6] = gr.update(value=None, visible=False)
583
  yield outputs; return
584
 
 
585
  loaded_files = []
586
  for file_path in file_list:
587
  content, err_get = get_space_file_content(hf_api_key_ui, ui_space_name, owner_to_use, file_path)
588
  lang = _infer_lang_from_filename(file_path)
589
+ is_binary = lang == "binary" or (err_get is not None)
590
+ code = f"[Error loading content: {err_get}]" if err_get else (content or "")
591
  loaded_files.append({"filename": file_path, "code": code, "language": lang, "is_binary": is_binary, "is_structure_block": False})
592
 
 
 
593
  parsed_code_blocks_state_cache = loaded_files
594
 
595
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(owner_to_use, ui_space_name)
596
  _status_val = f"Successfully loaded {len(file_list)} files from {owner_to_use}/{ui_space_name}. SDK: {sdk or 'unknown'}."
597
+ outputs[0], outputs[1], outputs[2], outputs[7] = _formatted, _detected, _status_val, _download
598
 
 
599
  outputs[3] = gr.update(visible=True, choices=sorted(file_list or []), value=None)
600
 
 
601
  if owner_to_use and ui_space_name:
602
  sub_owner = re.sub(r'[^a-z0-9\-]+', '-', owner_to_use.lower()).strip('-') or 'owner'
603
  sub_repo = re.sub(r'[^a-z0-9\-]+', '-', ui_space_name.lower()).strip('-') or 'space'
 
606
  else:
607
  outputs[6] = gr.update(value=None, visible=False)
608
 
 
609
  yield outputs
610
 
 
 
611
  def handle_build_space_button(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, space_sdk_ui, is_private_ui, formatted_markdown_content):
612
+ global parsed_code_blocks_state_cache
613
+ _build_status = "Starting manual space build process..."
614
+ _iframe_html, _file_browser_update = gr.update(value=None, visible=False), gr.update(visible=False, choices=[], value=None)
615
  _changeset_clear = []
616
  _changeset_summary_clear = "*Manual build initiated, changes plan cleared.*"
617
  _confirm_ui_hidden = gr.update(visible=False)
618
 
619
  yield (_build_status, _iframe_html, _file_browser_update, gr.update(value=ui_owner_name_part), gr.update(value=ui_space_name_part),
620
+ _changeset_clear, _changeset_summary_clear, _confirm_ui_hidden, _confirm_ui_hidden, _confirm_ui_hidden,
621
+ gr.update(), gr.update(), gr.update())
622
+
623
 
624
  if not ui_space_name_part or "/" in ui_space_name_part:
625
  _build_status = f"Build Error: Invalid Space Name '{ui_space_name_part}'."
626
  yield (_build_status, _iframe_html, _file_browser_update, gr.update(), gr.update(),
627
+ gr.update(), gr.update(), gr.update(), gr.update(), gr.update(),
628
+ gr.update(), gr.update(), gr.update()); return
629
 
630
+ parsed_content = build_logic_parse_markdown(formatted_markdown_content)
631
+ proposed_files_list = parsed_content.get("files", [])
 
632
 
633
+ manual_changeset = []
634
+ if ui_owner_name_part and ui_space_name_part:
635
+ manual_changeset.append({"type": "CREATE_SPACE", "repo_id": f"{ui_owner_name_part}/{ui_space_name_part}", "sdk": space_sdk_ui, "private": is_private_ui})
636
+ for file_info in proposed_files_list:
637
+ manual_changeset.append({"type": "CREATE_FILE", "path": file_info["path"], "content": file_info["content"], "lang": _infer_lang_from_filename(file_info["path"])})
638
 
639
+ if not manual_changeset:
640
+ _build_status = "Build Error: No target space specified or no files parsed from markdown."
641
+ yield (_build_status, _iframe_html, _file_browser_update, gr.update(), gr.update(),
642
+ gr.update(), gr.update(), gr.update(), gr.update(), gr.update(),
643
+ gr.update(), gr.update(), gr.update()); return
 
 
644
 
 
645
 
646
+ result_message = apply_staged_changes(hf_api_key_ui, ui_owner_name_part, ui_space_name_part, manual_changeset)
647
+ _build_status = f"Manual Build/Update Result: {result_message}"
648
+
649
+ owner_to_use = ui_owner_name_part
650
+ space_to_use = ui_space_name_part
651
+ _formatted_md, _detected_preview, _download = formatted_markdown_content, "*Loading files after build...*", gr.update(interactive=False, value=None)
652
+
653
+ yield (_build_status, _iframe_html, _file_browser_update, gr.update(value=owner_to_use), gr.update(value=space_to_use),
654
+ _changeset_clear, _changeset_summary_clear, _confirm_ui_hidden, _confirm_ui_hidden, _confirm_ui_hidden,
655
+ _formatted_md, _detected_preview, _download)
656
+
657
+ sdk_built, file_list, err_list = get_space_repository_info(hf_api_key_ui, space_to_use, owner_to_use)
658
+
659
+ if err_list:
660
+ _build_status += f" | Error reloading file list after build: {err_list}"
661
+ parsed_code_blocks_state_cache = []
662
+ _file_browser_update = gr.update(visible=False, choices=[], value=None)
663
+ _iframe_html = gr.update(value=None, visible=False)
664
+ else:
665
+ loaded_files = []
666
+ for file_path in file_list:
667
+ content, err_get = get_space_file_content(hf_api_key_ui, space_to_use, owner_to_use, file_path)
668
+ lang = _infer_lang_from_filename(file_path)
669
+ is_binary = lang == "binary" or (err_get is not None)
670
+ code = f"[Error loading content: {err_get}]" if err_get else (content or "")
671
+ loaded_files.append({"filename": file_path, "code": code, "language": lang, "is_binary": is_binary, "is_structure_block": False})
672
+ parsed_code_blocks_state_cache = loaded_files
673
+
674
+ _file_browser_update = gr.update(visible=True, choices=sorted(file_list or []), value=None)
675
+
676
+ if owner_to_use and space_to_use:
677
  sub_owner = re.sub(r'[^a-z0-9\-]+', '-', owner_to_use.lower()).strip('-') or 'owner'
678
+ sub_repo = re.sub(r'[^a-z0-9\-]+', '-', ui_space_name_part.lower()).strip('-') or 'space'
679
  iframe_url = f"https://{sub_owner}-{sub_repo}{'.static.hf.space' if sdk_built == 'static' else '.hf.space'}"
680
  _iframe_html = gr.update(value=f'<iframe src="{iframe_url}?__theme=light&embed=true" width="100%" height="700px"></iframe>', visible=True)
681
+ else:
682
+ _iframe_html = gr.update(value=None, visible=False)
683
 
684
+ _formatted_md, _detected_preview, _download = _generate_ui_outputs_from_cache(owner_to_use, space_to_use)
685
 
686
+ yield (_build_status, _iframe_html, _file_browser_update, gr.update(value=owner_to_use), gr.update(value=space_to_use),
 
 
 
 
687
  _changeset_clear, _changeset_summary_clear, _confirm_ui_hidden, _confirm_ui_hidden, _confirm_ui_hidden,
688
+ _formatted_md, _detected_preview, _download
689
  )
690
 
691
 
692
  def handle_load_file_for_editing(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, selected_file_path):
693
  if not selected_file_path:
694
+ return "", "Select a file.", "", gr.update(language="plaintext")
 
695
 
696
  content, err = get_space_file_content(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, selected_file_path)
697
  if err:
698
+ return "", f"Load Error: {err}", "", gr.update(language="plaintext")
 
 
699
 
700
  lang = _infer_lang_from_filename(selected_file_path)
701
  commit_msg = f"Update {selected_file_path}"
702
+ return content, f"Loaded `{selected_file_path}`", commit_msg, gr.update(language=lang)
703
 
704
  def handle_commit_file_changes(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_edit_path, edited_content, commit_message):
705
  if not file_to_edit_path:
706
+ return "Commit Error: No file selected for commit.", gr.update(), gr.update(), gr.update(), gr.update()
707
 
708
  status_msg = update_space_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_edit_path, edited_content, commit_message)
709
+
710
  global parsed_code_blocks_state_cache
711
  if "Successfully" in status_msg:
 
 
712
  found = False
713
  for block in parsed_code_blocks_state_cache:
714
  if block["filename"] == file_to_edit_path and not block.get("is_structure_block"):
715
  block["code"] = edited_content
716
+ block["language"] = _infer_lang_from_filename(file_to_edit_path)
717
+ block["is_binary"] = False
718
  found = True
719
  break
720
  if not found:
 
721
  parsed_code_blocks_state_cache.append({
722
  "filename": file_to_edit_path,
723
  "code": edited_content,
 
725
  "is_binary": False,
726
  "is_structure_block": False
727
  })
 
728
  parsed_code_blocks_state_cache.sort(key=lambda b: (0, b["filename"]) if b.get("is_structure_block") else (1, b["filename"]))
729
 
730
+ file_list, _ = list_space_files_for_browsing(hf_api_key_ui, ui_space_name_part, ui_owner_name_part)
731
+ file_browser_update = gr.update(choices=sorted(file_list or []))
732
+
733
+ else:
734
+ file_browser_update = gr.update()
735
+
736
 
 
737
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(ui_owner_name_part, ui_space_name_part)
738
+ return status_msg, file_browser_update, _formatted, _detected, _download
739
 
740
  def handle_delete_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_delete_path):
741
  if not file_to_delete_path:
742
+ return "Delete Error: No file selected to delete.", gr.update(), "", "", "plaintext", gr.update(), gr.update(), gr.update()
743
 
744
  status_msg = build_logic_delete_space_file(hf_api_key_ui, ui_space_name_part, ui_owner_name_part, file_to_delete_path)
745
+
746
  global parsed_code_blocks_state_cache
747
+ file_browser_update = gr.update()
748
+ file_content_editor_update = gr.update()
749
+ commit_message_update = gr.update()
750
+ editor_lang_update = gr.update()
751
+
752
  if "Successfully" in status_msg:
 
753
  parsed_code_blocks_state_cache = [b for b in parsed_code_blocks_state_cache if b["filename"] != file_to_delete_path]
 
754
  file_content_editor_update = gr.update(value="")
755
  commit_message_update = gr.update(value="")
756
  editor_lang_update = gr.update(language="plaintext")
757
+ file_list, _ = list_space_files_for_browsing(hf_api_key_ui, ui_space_name_part, ui_owner_name_part)
758
+ file_browser_update = gr.update(choices=sorted(file_list or []), value=None)
 
 
 
 
759
 
 
760
  _formatted, _detected, _download = _generate_ui_outputs_from_cache(ui_owner_name_part, ui_space_name_part)
761
  return (
762
  status_msg,
763
+ file_browser_update,
764
  file_content_editor_update,
765
  commit_message_update,
766
  editor_lang_update,
 
771
 
772
  def handle_refresh_space_status(hf_api_key_ui, ui_owner_name, ui_space_name):
773
  if not ui_owner_name or not ui_space_name:
774
+ return "**Status Error:** Owner and Space Name must be provided to get status."
775
 
776
  status_details, err = get_space_runtime_status(hf_api_key_ui, ui_space_name, ui_owner_name)
777
+ if err: return f"**Status Error:** {err}"
778
  if not status_details: return "*Could not retrieve status details.*"
779
 
780
  md = f"### Status for {ui_owner_name}/{ui_space_name}\n"
 
781
  md += f"- **Stage:** `{status_details.get('stage', 'N/A')}`\n"
782
+ md += f"- **Status:** `{status_details.get('status', 'N/A')}`\n"
783
  md += f"- **Hardware:** `{status_details.get('hardware', 'N/A')}`\n"
784
  requested_hw = status_details.get('requested_hardware')
785
  if requested_hw: md += f"- **Requested Hardware:** `{requested_hw}`\n"
786
  error_msg = status_details.get('error_message')
787
+ if error_msg: md += f"- **Error:** `{escape_html_for_markdown(error_msg)}`\n"
788
  log_link = status_details.get('full_log_link')
789
  if log_link and log_link != "#": md += f"- [View Full Logs]({log_link})\n"
790
 
791
  return md
792
 
793
 
 
794
  custom_theme = gr.themes.Base(primary_hue="teal", secondary_hue="purple", neutral_hue="zinc", text_size="sm", spacing_size="md", radius_size="sm", font=["System UI", "sans-serif"])
795
  custom_css = """
796
  body { background: linear-gradient(to bottom right, #2c3e50, #34495e); color: #ecf0f1; }
 
800
  .gr-button.gr-button-primary { background-color: #1abc9c !important; color: white !important; border-color: #16a085 !important; }
801
  .gr-button.gr-button-secondary { background-color: #9b59b6 !important; color: white !important; border-color: #8e44ad !important; }
802
  .gr-button.gr-button-stop { background-color: #e74c3c !important; color: white !important; border-color: #c0392b !important; }
803
+ .gr-markdown { background-color: rgba(44, 62, 80, 0.7) !important; padding: 10px; border-radius: 5px; overflow-x: auto; }
804
  .gr-markdown h1, .gr-markdown h2, .gr-markdown h3, .gr-markdown h4, .gr-markdown h5, .gr-markdown h6 { color: #ecf0f1 !important; border-bottom-color: rgba(189, 195, 199, 0.3) !important; }
805
  .gr-markdown pre code { background-color: rgba(52, 73, 94, 0.95) !important; border-color: rgba(189, 195, 199, 0.3) !important; }
806
  .gr-chatbot { background-color: rgba(44, 62, 80, 0.7) !important; border-color: rgba(189, 195, 199, 0.2) !important; }
807
  .gr-chatbot .message { background-color: rgba(52, 73, 94, 0.9) !important; color: #ecf0f1 !important; border-color: rgba(189, 195, 199, 0.3) !important; }
808
  .gr-chatbot .message.user { background-color: rgba(46, 204, 113, 0.9) !important; color: black !important; }
 
809
  .gradio-container .gr-accordion { border-color: rgba(189, 195, 199, 0.3) !important; }
810
  .gradio-container .gr-accordion.closed { background-color: rgba(52, 73, 94, 0.9) !important; }
811
  .gradio-container .gr-accordion.open { background-color: rgba(44, 62, 80, 0.8) !important; }
812
 
813
  """
814
 
 
 
815
  with gr.Blocks(theme=custom_theme, css=custom_css) as demo:
 
816
  changeset_state = gr.State([])
 
 
 
 
817
 
818
  gr.Markdown("# πŸ€– AI-Powered Hugging Face Space Commander")
819
  gr.Markdown("Use an AI assistant to create, modify, build, and manage your Hugging Face Spaces directly from this interface.")
 
826
  with gr.Accordion("βš™οΈ Configuration", open=True):
827
  hf_api_key_input = gr.Textbox(label="Hugging Face Token", type="password", placeholder="hf_... (uses env var HF_TOKEN if empty)")
828
  owner_name_input = gr.Textbox(label="HF Owner Name", placeholder="e.g., your-username")
829
+ space_name_input = gr.Textbox(label="HF Space Name", value="")
830
  load_space_button = gr.Button("πŸ”„ Load Existing Space", variant="secondary")
 
 
 
 
 
831
 
832
  with gr.Accordion("πŸ€– AI Model Settings", open=True):
 
833
  available_providers = get_available_providers()
834
  default_provider = 'Groq'
 
835
  if default_provider not in available_providers:
836
  default_provider = available_providers[0] if available_providers else None
837
  elif len(available_providers) < 3:
838
  default_provider = available_providers[0] if available_providers else None
839
 
 
 
840
  initial_models = get_models_for_provider(default_provider) if default_provider else []
841
  initial_model = get_default_model_for_provider(default_provider) if default_provider else None
 
842
  if initial_model not in initial_models:
843
  initial_model = initial_models[0] if initial_models else None
844
 
 
854
  value=initial_model,
855
  allow_custom_value=False
856
  )
 
857
  provider_api_key_input = gr.Textbox(label="Model Provider API Key (Optional)", type="password", placeholder="sk_... (overrides backend settings)")
858
  system_prompt_input = gr.Textbox(label="System Prompt", lines=10, value=DEFAULT_SYSTEM_PROMPT, elem_id="system-prompt")
859
 
 
865
  send_chat_button = gr.Button("Send", variant="primary", scale=1)
866
  status_output = gr.Textbox(label="Last Action Status", interactive=False, value="Ready.")
867
 
 
868
  with gr.Accordion("πŸ“ Proposed Changes (Pending Confirmation)", open=False, visible=False) as confirm_accordion:
869
  changeset_display = gr.Markdown("*No changes proposed.*")
870
  with gr.Row():
 
875
  with gr.TabItem("πŸ“ Generated Markdown & Build"):
876
  with gr.Row():
877
  with gr.Column(scale=2):
 
878
  formatted_space_output_display = gr.Textbox(label="Current Space Definition (Generated Markdown)", lines=20, interactive=True, value="*Load or create a space to see its definition.*")
879
  download_button = gr.DownloadButton(label="Download .md", interactive=False)
880
  with gr.Column(scale=1):
881
+ gr.Markdown("### Manual Build & Status")
 
882
  space_sdk_select = gr.Dropdown(label="Space SDK", choices=["gradio", "streamlit", "docker", "static"], value="gradio", interactive=True)
883
  space_private_checkbox = gr.Checkbox(label="Make Space Private", value=False, interactive=True)
 
884
  build_space_button = gr.Button("πŸš€ Build / Update Space from Markdown", variant="primary")
885
+ build_status_display = gr.Textbox(label="Manual Build/Update Status", interactive=False, value="*Manual build status...*")
886
+ gr.Markdown("---")
 
887
  refresh_status_button = gr.Button("πŸ”„ Refresh Runtime Status")
888
  space_runtime_status_display = gr.Markdown("*Runtime status will appear here.*")
889
 
890
  with gr.TabItem("πŸ” Files Preview"):
 
891
  detected_files_preview = gr.Markdown(value="*A preview of the latest file versions will appear here.*")
892
 
893
  with gr.TabItem("✏️ Live File Editor & Preview"):
894
  with gr.Row():
895
  with gr.Column(scale=1):
896
  gr.Markdown("### Live Editor")
 
897
  file_browser_dropdown = gr.Dropdown(label="Select File in Space", choices=[], interactive=True)
 
898
  file_content_editor = gr.Code(label="File Content Editor", language="python", lines=15, interactive=True, value="")
899
  commit_message_input = gr.Textbox(label="Commit Message", placeholder="e.g., Updated app.py", interactive=True, value="")
900
  with gr.Row():
 
901
  update_file_button = gr.Button("Commit Changes", variant="primary", interactive=True)
902
  delete_file_button = gr.Button("πŸ—‘οΈ Delete Selected File", variant="stop", interactive=True)
903
  edit_status_display = gr.Textbox(label="File Edit/Delete Status", interactive=False, value="")
904
  with gr.Column(scale=1):
905
  gr.Markdown("### Live Space Preview")
 
906
  space_iframe_display = gr.HTML(value="", visible=True)
907
 
 
 
 
908
  provider_select.change(update_models_dropdown, inputs=provider_select, outputs=model_select)
909
 
 
910
  chat_inputs = [
911
  chat_message_input, chatbot_display, hf_api_key_input,
912
  provider_api_key_input, provider_select, model_select, system_prompt_input,
913
+ owner_name_input, space_name_input
914
  ]
915
  chat_outputs = [
916
  chat_message_input, chatbot_display, status_output,
 
920
  send_chat_button.click(handle_chat_submit, inputs=chat_inputs, outputs=chat_outputs)
921
  chat_message_input.submit(handle_chat_submit, inputs=chat_inputs, outputs=chat_outputs)
922
 
 
923
  confirm_inputs = [hf_api_key_input, owner_name_input, space_name_input, changeset_state]
924
  confirm_outputs = [
925
  status_output, formatted_space_output_display, detected_files_preview, download_button,
926
+ confirm_accordion, confirm_button, cancel_button, changeset_state, changeset_display
927
  ]
928
  confirm_button.click(handle_confirm_changes, inputs=confirm_inputs, outputs=confirm_outputs)
929
 
 
933
  ]
934
  cancel_button.click(handle_cancel_changes, inputs=None, outputs=cancel_outputs)
935
 
 
936
  load_space_outputs = [
937
  formatted_space_output_display, detected_files_preview, status_output,
938
+ file_browser_dropdown, owner_name_input, space_name_input,
939
  space_iframe_display, download_button, build_status_display,
940
  edit_status_display, space_runtime_status_display,
941
+ changeset_state, changeset_display, confirm_accordion, confirm_button, cancel_button
 
942
  ]
943
  load_space_button.click(
944
  fn=handle_load_existing_space,
 
946
  outputs=load_space_outputs
947
  )
948
 
 
949
  build_outputs = [
950
  build_status_display, space_iframe_display, file_browser_dropdown,
951
+ owner_name_input, space_name_input,
952
+ changeset_state, changeset_display, confirm_accordion, confirm_button, cancel_button,
953
+ formatted_space_output_display, detected_files_preview, download_button
954
  ]
955
  build_inputs = [
956
  hf_api_key_input, space_name_input, owner_name_input, space_sdk_select,
957
+ space_private_checkbox, formatted_space_output_display
958
  ]
959
  build_space_button.click(fn=handle_build_space_button, inputs=build_inputs, outputs=build_outputs)
960
 
961
+ file_edit_load_outputs = [file_content_editor, edit_status_display, commit_message_input, file_content_editor]
 
 
962
  file_browser_dropdown.change(fn=handle_load_file_for_editing, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown], outputs=file_edit_load_outputs)
963
 
 
964
  commit_file_outputs = [edit_status_display, file_browser_dropdown, formatted_space_output_display, detected_files_preview, download_button]
965
  update_file_button.click(fn=handle_commit_file_changes, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown, file_content_editor, commit_message_input], outputs=commit_file_outputs)
966
 
 
967
  delete_file_outputs = [
968
  edit_status_display, file_browser_dropdown,
969
+ file_content_editor, commit_message_input, file_content_editor,
970
+ formatted_space_output_display, detected_files_preview, download_button
971
  ]
972
  delete_file_button.click(fn=handle_delete_file, inputs=[hf_api_key_input, space_name_input, owner_name_input, file_browser_dropdown], outputs=delete_file_outputs)
973
 
 
974
  refresh_status_button.click(fn=handle_refresh_space_status, inputs=[hf_api_key_input, owner_name_input, space_name_input], outputs=[space_runtime_status_display])
975
 
976
  if __name__ == "__main__":