broadfield-dev committed on
Commit
4038f9b
·
verified ·
1 Parent(s): 3aff388

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +157 -139
app.py CHANGED
@@ -43,7 +43,6 @@ DEFAULT_SYSTEM_PROMPT = os.getenv(
43
  logger.info(f"App Config: WebSearch={WEB_SEARCH_ENABLED}, ToolDecisionProvider={TOOL_DECISION_PROVIDER_ENV}, ToolDecisionModelID={TOOL_DECISION_MODEL_ID_ENV}, MemoryBackend={MEMORY_STORAGE_BACKEND}")
44
 
45
  # --- Helper Functions (format_insights_for_prompt, generate_interaction_metrics, etc.) ---
46
- # ENSURE ALL YOUR HELPER AND LOGIC FUNCTIONS ARE DEFINED HERE AS IN PREVIOUS VERSIONS
47
  def format_insights_for_prompt(retrieved_insights_list: list[str]) -> tuple[str, list[dict]]:
48
  if not retrieved_insights_list:
49
  return "No specific guiding principles or learned insights retrieved.", []
@@ -276,66 +275,121 @@ Combine all findings into a single JSON list of operations. If there are multipl
276
  def handle_gradio_chat_submit(user_msg_txt: str, gr_hist_list: list, sel_prov_name: str, sel_model_disp_name: str, ui_api_key: str|None, cust_sys_prompt: str):
277
  global current_chat_session_history
278
  cleared_input, updated_gr_hist, status_txt = "", list(gr_hist_list), "Initializing..."
279
- def_detect_out_md, def_fmt_out_txt = gr.Markdown("*Processing...*"), gr.Textbox("*Waiting...*")
 
280
  def_dl_btn = gr.DownloadButton(interactive=False, value=None, visible=False)
 
281
  if not user_msg_txt.strip():
282
  status_txt = "Error: Empty message."
283
  updated_gr_hist.append((user_msg_txt or "(Empty)", status_txt))
284
- yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn); return
 
 
 
285
  updated_gr_hist.append((user_msg_txt, "<i>Thinking...</i>"))
 
286
  yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn)
 
287
  internal_hist = list(current_chat_session_history); internal_hist.append({"role": "user", "content": user_msg_txt})
288
  if len(internal_hist) > (MAX_HISTORY_TURNS * 2 + 1):
289
  if internal_hist[0]["role"] == "system" and len(internal_hist) > (MAX_HISTORY_TURNS * 2 + 1) : internal_hist = [internal_hist[0]] + internal_hist[-(MAX_HISTORY_TURNS * 2):]
290
  else: internal_hist = internal_hist[-(MAX_HISTORY_TURNS * 2):]
 
291
  final_bot_resp_acc, insights_used_parsed = "", []
292
  temp_dl_file_path = None
 
293
  try:
294
  processor_gen = process_user_interaction_gradio(user_input=user_msg_txt, provider_name=sel_prov_name, model_display_name=sel_model_disp_name, chat_history_for_prompt=internal_hist, custom_system_prompt=cust_sys_prompt.strip() or None, ui_api_key_override=ui_api_key.strip() if ui_api_key else None)
295
  curr_bot_disp_msg = ""
296
  for upd_type, upd_data in processor_gen:
297
  if upd_type == "status":
298
  status_txt = upd_data
299
- if updated_gr_hist and updated_gr_hist[-1][0] == user_msg_txt: updated_gr_hist[-1] = (user_msg_txt, f"{curr_bot_disp_msg} <i>{status_txt}</i>" if curr_bot_disp_msg else f"<i>{status_txt}</i>")
 
300
  elif upd_type == "response_chunk":
301
  curr_bot_disp_msg += upd_data
302
- if updated_gr_hist and updated_gr_hist[-1][0] == user_msg_txt: updated_gr_hist[-1] = (user_msg_txt, curr_bot_disp_msg)
 
303
  elif upd_type == "final_response_and_insights":
304
  final_bot_resp_acc, insights_used_parsed = upd_data["response"], upd_data["insights_used"]
305
  status_txt = "Response complete."
306
  if not curr_bot_disp_msg and final_bot_resp_acc : curr_bot_disp_msg = final_bot_resp_acc
307
- if updated_gr_hist and updated_gr_hist[-1][0] == user_msg_txt: updated_gr_hist[-1] = (user_msg_txt, curr_bot_disp_msg or "(No text)")
308
- def_fmt_out_txt = gr.Textbox(value=curr_bot_disp_msg)
 
 
 
309
  if curr_bot_disp_msg and not curr_bot_disp_msg.startswith("Error:"):
310
  with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".md", encoding='utf-8') as tmpfile:
311
  tmpfile.write(curr_bot_disp_msg)
312
  temp_dl_file_path = tmpfile.name
313
  def_dl_btn = gr.DownloadButton(value=temp_dl_file_path, visible=True, interactive=True)
314
- else: def_dl_btn = gr.DownloadButton(interactive=False, value=None, visible=False)
315
- insights_md = "### Insights Considered:\n" + ("\n".join([f"- **[{i.get('type','N/A')}|{i.get('score','N/A')}]** {i.get('text','N/A')[:100]}..." for i in insights_used_parsed[:3]]) if insights_used_parsed else "*None specific.*")
316
- def_detect_out_md = gr.Markdown(insights_md)
 
 
 
317
  yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn)
318
  if upd_type == "final_response_and_insights": break
319
  except Exception as e:
320
  logger.error(f"Chat handler error: {e}", exc_info=True); status_txt = f"Error: {str(e)[:100]}"
321
- if updated_gr_hist and updated_gr_hist[-1][0] == user_msg_txt: updated_gr_hist[-1] = (user_msg_txt, status_txt)
322
- else: updated_gr_hist.append((user_msg_txt, status_txt))
323
- yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn); return
 
 
 
 
 
 
 
 
324
  if final_bot_resp_acc and not final_bot_resp_acc.startswith("Error:"):
325
  current_chat_session_history.extend([{"role": "user", "content": user_msg_txt}, {"role": "assistant", "content": final_bot_resp_acc}])
326
  hist_len_check = MAX_HISTORY_TURNS * 2
327
  if current_chat_session_history and current_chat_session_history[0]["role"] == "system": hist_len_check +=1
328
  if len(current_chat_session_history) > hist_len_check:
329
  current_chat_session_history = ([current_chat_session_history[0]] if current_chat_session_history[0]["role"] == "system" else []) + current_chat_session_history[-(MAX_HISTORY_TURNS * 2):]
 
330
  threading.Thread(target=deferred_learning_and_memory_task, args=(user_msg_txt, final_bot_resp_acc, sel_prov_name, sel_model_disp_name, insights_used_parsed, ui_api_key.strip() if ui_api_key else None), daemon=True).start()
331
  status_txt = "Response complete. Background learning initiated."
332
- else: status_txt = "Processing finished; no response or error."
 
 
 
 
 
 
 
 
 
333
  yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn)
 
334
  if temp_dl_file_path and os.path.exists(temp_dl_file_path):
335
  try: os.unlink(temp_dl_file_path)
336
  except Exception as e_unlink: logger.error(f"Error deleting temp download file {temp_dl_file_path}: {e_unlink}")
337
 
 
338
  def ui_view_rules_action_fn(): return "\n\n---\n\n".join(get_all_rules_cached()) or "No rules found."
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
339
  def ui_upload_rules_action_fn(uploaded_file_obj, progress=gr.Progress()):
340
  if not uploaded_file_obj: return "No file provided for rules upload."
341
  try:
@@ -359,7 +413,35 @@ def ui_upload_rules_action_fn(uploaded_file_obj, progress=gr.Progress()):
359
  msg = f"Rules Upload: Processed {total_to_process}. Added: {added_count}, Skipped (duplicates): {skipped_count}, Errors/Invalid: {error_count}."
360
  logger.info(msg); return msg
361
 
362
- def ui_view_memories_action_fn(): return get_all_memories_cached() or []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
363
  def ui_upload_memories_action_fn(uploaded_file_obj, progress=gr.Progress()):
364
  if not uploaded_file_obj: return "No file provided for memories upload."
365
  try:
@@ -378,7 +460,7 @@ def ui_upload_memories_action_fn(uploaded_file_obj, progress=gr.Progress()):
378
  except: format_error_count += 1
379
  if not memory_objects_to_process and format_error_count == 0: return "No valid memory objects found."
380
  total_to_process = len(memory_objects_to_process)
381
- if total_to_process == 0: return "No memory objects to process."
382
  progress(0, desc="Starting memories upload...")
383
  for idx, mem_data in enumerate(memory_objects_to_process):
384
  if isinstance(mem_data, dict) and all(k in mem_data for k in ["user_input", "bot_response", "metrics"]):
@@ -390,49 +472,9 @@ def ui_upload_memories_action_fn(uploaded_file_obj, progress=gr.Progress()):
390
  msg = f"Memories Upload: Processed {total_to_process}. Added: {added_count}, Format Errors: {format_error_count}, Save Errors: {save_error_count}."
391
  logger.info(msg); return msg
392
 
393
- # --- UI Definition ---
394
- # Using the CSS inspired by the "AI Code & Space Generator" example
395
- custom_theme = gr.themes.Base(
396
- primary_hue="teal",
397
- secondary_hue="purple",
398
- neutral_hue="zinc",
399
- text_size="sm",
400
- spacing_size="md",
401
- radius_size="sm",
402
- font=["System UI", "sans-serif"]
403
- )
404
-
405
- custom_css = """
406
- body { background: linear-gradient(to bottom right, #2c3e50, #34495e); color: #ecf0f1; margin:0; padding:0; font-family: 'System UI', sans-serif; overflow-x: hidden;}
407
- .gradio-container { background: transparent !important; padding: 0 !important; /* Remove padding from the outermost container */ }
408
- .main-interface-wrapper { max-width: 100%; padding: 1rem; box-sizing: border-box; min-height: 100vh; display: flex; flex-direction: column;} /* New wrapper */
409
- .gr-column {gap: 0px !important;}
410
- .gr-block.gr-group, .gr-tabs, .gr-accordion { background-color: rgba(44, 62, 80, 0.8) !important; border: 1px solid rgba(189, 195, 199, 0.2) !important; border-radius: 8px !important; padding: 1em; margin-bottom: 1em;}
411
- .gr-tabitem { background-color: rgba(52, 73, 94, 0.75) !important; border-radius: 6px !important; padding: 1em !important; border: 1px solid rgba(189, 195, 199, 0.1) !important;}
412
- .gr-textbox, .gr-dropdown, .gr-button, .gr-code, .gr-chat-message, .gr-json, .gr-file input[type="file"], .gr-file button { border-color: rgba(189, 195, 199, 0.3) !important; background-color: rgba(52, 73, 94, 0.9) !important; color: #ecf0f1 !important; border-radius: 6px !important;}
413
- .gr-textarea textarea, .gr-textbox input { color: #ecf0f1 !important; }
414
- .gr-button.gr-button-primary { background-color: #1abc9c !important; color: white !important; border-color: #16a085 !important; }
415
- .gr-button.gr-button-secondary { background-color: #9b59b6 !important; color: white !important; border-color: #8e44ad !important;}
416
- .gr-button.gr-button-stop { background-color: #e74c3c !important; color: white !important; border-color: #c0392b !important; }
417
- .gr-markdown { padding: 5px; }
418
- .gr-group .gr-markdown { padding: 0px; background-color: transparent !important; }
419
- .gr-markdown h1, .gr-markdown h2, .gr-markdown h3 { color: #ecf0f1 !important; border-bottom-color: rgba(189, 195, 199, 0.3) !important; margin-top: 0.5em; margin-bottom: 0.5em;}
420
- .gr-markdown h1 {font-size: 1.5rem;} .gr-markdown h2 {font-size: 1.25rem;} .gr-markdown h3 {font-size: 1.1rem;}
421
- .gr-markdown p, .gr-markdown li { color: #ecf0f1 !important; }
422
- .gr-markdown pre code { background-color: rgba(52, 73, 94, 0.95) !important; border-color: rgba(189, 195, 199, 0.3) !important; }
423
- .gr-chatbot { background-color: rgba(44, 62, 80, 0.7) !important; border-color: rgba(189, 195, 199, 0.2) !important; }
424
- .gr-chatbot .message { background-color: rgba(52, 73, 94, 0.9) !important; color: #ecf0f1 !important; border-color: rgba(189, 195, 199, 0.3) !important; }
425
- .gr-chatbot .message.user { background-color: rgba(46, 204, 113, 0.9) !important; color: black !important; }
426
- .gr-input-label > .label-text, .gr-dropdown-label > .label-text { color: #bdc3c7 !important; }
427
- .status-bar { padding: 0.5rem 1rem; border-radius: 6px; margin-bottom: 1rem; background-color: rgba(44, 62, 80, 0.8) !important; }
428
- .tabnav button { background-color: rgba(52, 73, 94, 0.8) !important; color: #ecf0f1 !important; border-bottom: 2px solid transparent !important;}
429
- .tabnav button.selected { background-color: rgba(44, 62, 80, 0.95) !important; color: #1abc9c !important; border-bottom: 2px solid #1abc9c !important;}
430
- .app-title-area { padding: 0.5rem 0; text-align: center;} /* Area for title and subtitle */
431
- """
432
-
433
 
434
  with gr.Blocks(
435
- theme=gr.themes.Soft(), # Use a clean, modern theme
436
  css="""
437
  .gr-button { margin: 5px; }
438
  .gr-textbox, .gr-text-area, .gr-dropdown { border-radius: 8px; }
@@ -442,7 +484,6 @@ with gr.Blocks(
442
  .status-text { font-size: 0.9em; color: #555; }
443
  """
444
  ) as demo:
445
- # Header Section
446
  gr.Markdown(
447
  """
448
  # πŸ€– AI Research Agent
@@ -451,56 +492,39 @@ with gr.Blocks(
451
  elem_classes=["header"]
452
  )
453
 
454
- # Status Bar
455
  is_sqlite = MEMORY_STORAGE_BACKEND == "SQLITE"
456
  is_hf_dataset = MEMORY_STORAGE_BACKEND == "HF_DATASET"
457
 
458
  with gr.Row(variant="compact"):
459
  agent_stat_tb = gr.Textbox(
460
- label="Agent Status",
461
- value="Initializing systems...",
462
- interactive=False,
463
- elem_classes=["status-text"],
464
- scale=4
465
  )
466
  with gr.Column(scale=1, min_width=150):
467
  memory_backend_info_tb = gr.Textbox(
468
- label="Memory Backend",
469
- value=MEMORY_STORAGE_BACKEND,
470
- interactive=False,
471
  elem_classes=["status-text"]
472
  )
473
  sqlite_path_display = gr.Textbox(
474
- label="SQLite Path",
475
- value=MEMORY_SQLITE_PATH,
476
- interactive=False,
477
- visible=is_sqlite, # Use precomputed boolean
478
- elem_classes=["status-text"]
479
  )
480
  hf_repos_display = gr.Textbox(
481
- label="HF Repos",
482
- value=f"M: {MEMORY_HF_MEM_REPO}, R: {MEMORY_HF_RULES_REPO}",
483
- interactive=False,
484
- visible=is_hf_dataset, # Use precomputed boolean
485
- elem_classes=["status-text"]
486
  )
487
 
488
  with gr.Row():
489
- # Sidebar (unchanged)
490
  with gr.Sidebar():
491
  gr.Markdown("## βš™οΈ Configuration")
492
  with gr.Group():
493
  gr.Markdown("### AI Model Settings")
494
  api_key_tb = gr.Textbox(
495
- label="AI Provider API Key (Override)",
496
- type="password",
497
- placeholder="Uses .env if blank"
498
  )
499
  prov_sel_dd = gr.Dropdown(
500
- label="AI Provider",
501
- choices=get_available_providers(),
502
- value=get_available_providers()[0] if get_available_providers() else None,
503
- interactive=True
504
  )
505
  model_sel_dd = gr.Dropdown(
506
  label="AI Model",
@@ -511,98 +535,75 @@ with gr.Blocks(
511
  with gr.Group():
512
  gr.Markdown("### System Prompt")
513
  sys_prompt_tb = gr.Textbox(
514
- label="System Prompt Base",
515
- lines=8,
516
- value=DEFAULT_SYSTEM_PROMPT,
517
- interactive=True
518
  )
519
  if MEMORY_STORAGE_BACKEND == "RAM":
520
  save_faiss_sidebar_btn = gr.Button("Save FAISS Indices", variant="secondary")
521
 
522
- # Main Content Area
523
  with gr.Column(scale=3):
524
  with gr.Tabs():
525
  with gr.TabItem("πŸ’¬ Chat & Research"):
526
  with gr.Group():
527
  gr.Markdown("### AI Chat Interface")
528
  main_chat_disp = gr.Chatbot(
529
- label=None,
530
- height=400,
531
- bubble_full_width=False,
532
  avatar_images=(None, "https://raw.githubusercontent.com/huggingface/brand-assets/main/hf-logo-with-title.png"),
533
- show_copy_button=True,
534
- render_markdown=True,
535
- sanitize_html=True
536
  )
537
  with gr.Row(variant="compact"):
538
  user_msg_tb = gr.Textbox(
539
- show_label=False,
540
- placeholder="Ask your research question...",
541
- scale=7,
542
- lines=1,
543
- max_lines=3
544
  )
545
  send_btn = gr.Button("Send", variant="primary", scale=1, min_width=100)
546
  with gr.Accordion("πŸ“ Detailed Response", open=False):
547
  fmt_report_tb = gr.Textbox(
548
- label="Full AI Response",
549
- lines=8,
550
- interactive=True,
551
- show_copy_button=True
552
  )
553
  dl_report_btn = gr.DownloadButton(
554
- "Download Report",
555
- interactive=False,
556
- visible=False
557
  )
558
- detect_out_md = gr.Markdown(visible=False)
559
 
560
  with gr.TabItem("🧠 Knowledge Base"):
561
  with gr.Row(equal_height=True):
562
  with gr.Column():
563
  gr.Markdown("### πŸ“œ Rules Management")
564
  rules_disp_ta = gr.TextArea(
565
- label=None,
566
- lines=10,
567
- placeholder="View or edit rules...",
568
- interactive=True
569
  )
570
  with gr.Row(variant="compact"):
571
  view_rules_btn = gr.Button("πŸ”„ Load Rules")
572
  save_edited_rules_btn = gr.Button("πŸ’Ύ Save Rules", variant="primary")
 
 
573
  clear_rules_btn = gr.Button("πŸ—‘οΈ Clear Rules", variant="stop")
574
  upload_rules_fobj = gr.File(
575
- label="Upload Rules",
576
  file_types=[".txt", ".jsonl"]
577
  )
578
  rules_stat_tb = gr.Textbox(
579
- label="Rules Status",
580
- interactive=False,
581
- lines=1,
582
- elem_classes=["status-text"]
583
  )
584
 
585
  with gr.Column():
586
  gr.Markdown("### πŸ“š Memories Management")
587
  mems_disp_json = gr.JSON(
588
- label=None,
589
- value={"memories": []}
590
  )
591
  with gr.Row(variant="compact"):
592
  view_mems_btn = gr.Button("πŸ”„ Load Memories")
 
 
593
  clear_mems_btn = gr.Button("πŸ—‘οΈ Clear Memories", variant="stop")
594
  upload_mems_fobj = gr.File(
595
- label="Upload Memories",
596
- file_types=[".jsonl"]
597
  )
598
  mems_stat_tb = gr.Textbox(
599
- label="Memories Status",
600
- interactive=False,
601
- lines=1,
602
- elem_classes=["status-text"]
603
  )
604
 
605
- # Event Handlers (unchanged logic, cleaned up organization)
606
  def dyn_upd_model_dd(sel_prov_dyn: str):
607
  models_dyn = get_model_display_names_for_provider(sel_prov_dyn)
608
  def_model_dyn = get_default_model_display_name_for_provider(sel_prov_dyn)
@@ -612,10 +613,23 @@ with gr.Blocks(
612
 
613
  chat_ins = [user_msg_tb, main_chat_disp, prov_sel_dd, model_sel_dd, api_key_tb, sys_prompt_tb]
614
  chat_outs = [user_msg_tb, main_chat_disp, agent_stat_tb, detect_out_md, fmt_report_tb, dl_report_btn]
615
- send_btn.click(fn=handle_gradio_chat_submit, inputs=chat_ins, outputs=chat_outs)
616
- user_msg_tb.submit(fn=handle_gradio_chat_submit, inputs=chat_ins, outputs=chat_outs)
 
 
 
 
 
 
 
 
 
 
617
 
 
618
  view_rules_btn.click(fn=ui_view_rules_action_fn, outputs=rules_disp_ta)
 
 
619
 
620
  def save_edited_rules_action_fn(edited_rules_text: str, progress=gr.Progress()):
621
  if not edited_rules_text.strip():
@@ -638,7 +652,7 @@ with gr.Blocks(
638
  skipped += 1
639
  else:
640
  errors += 1
641
- progress((idx + 1) / total)
642
  return f"Editor Save: Added: {added}, Skipped (duplicates): {skipped}, Errors/Invalid: {errors}."
643
 
644
  save_edited_rules_btn.click(
@@ -656,17 +670,14 @@ with gr.Blocks(
656
  ).then(fn=ui_view_rules_action_fn, outputs=rules_disp_ta)
657
 
658
  clear_rules_btn.click(
659
- fn=lambda: "All rules cleared." if clear_all_rules_data_backend() else "Error clearing rules.",
660
  outputs=rules_stat_tb
661
  ).then(fn=ui_view_rules_action_fn, outputs=rules_disp_ta)
662
 
663
- if MEMORY_STORAGE_BACKEND == "RAM" and 'save_faiss_sidebar_btn' in locals():
664
- def save_faiss_action_with_feedback_sidebar_fn():
665
- save_faiss_indices_to_disk()
666
- gr.Info("Attempted to save FAISS indices to disk.")
667
- save_faiss_sidebar_btn.click(fn=save_faiss_action_with_feedback_sidebar_fn, inputs=None, outputs=None)
668
-
669
  view_mems_btn.click(fn=ui_view_memories_action_fn, outputs=mems_disp_json)
 
 
670
 
671
  upload_mems_fobj.upload(
672
  fn=ui_upload_memories_action_fn,
@@ -676,10 +687,17 @@ with gr.Blocks(
676
  ).then(fn=ui_view_memories_action_fn, outputs=mems_disp_json)
677
 
678
  clear_mems_btn.click(
679
- fn=lambda: "All memories cleared." if clear_all_memory_data_backend() else "Error clearing memories.",
680
  outputs=mems_stat_tb
681
  ).then(fn=ui_view_memories_action_fn, outputs=mems_disp_json)
682
 
 
 
 
 
 
 
 
683
  def app_load_fn():
684
  initialize_memory_system()
685
  logger.info("App loaded. Memory system initialized.")
@@ -691,7 +709,7 @@ with gr.Blocks(
691
  demo.load(fn=app_load_fn, inputs=None, outputs=[agent_stat_tb, rules_disp_ta, mems_disp_json])
692
 
693
  if __name__ == "__main__":
694
- logger.info(f"Starting Gradio AI Research Mega Agent (v5.8 - UI Style/Layout Update, Memory: {MEMORY_STORAGE_BACKEND})...")
695
  app_port = int(os.getenv("GRADIO_PORT", 7860))
696
  app_server = os.getenv("GRADIO_SERVER_NAME", "127.0.0.1")
697
  app_debug = os.getenv("GRADIO_DEBUG", "False").lower() == "true"
 
43
  logger.info(f"App Config: WebSearch={WEB_SEARCH_ENABLED}, ToolDecisionProvider={TOOL_DECISION_PROVIDER_ENV}, ToolDecisionModelID={TOOL_DECISION_MODEL_ID_ENV}, MemoryBackend={MEMORY_STORAGE_BACKEND}")
44
 
45
  # --- Helper Functions (format_insights_for_prompt, generate_interaction_metrics, etc.) ---
 
46
  def format_insights_for_prompt(retrieved_insights_list: list[str]) -> tuple[str, list[dict]]:
47
  if not retrieved_insights_list:
48
  return "No specific guiding principles or learned insights retrieved.", []
 
275
  def handle_gradio_chat_submit(user_msg_txt: str, gr_hist_list: list, sel_prov_name: str, sel_model_disp_name: str, ui_api_key: str|None, cust_sys_prompt: str):
276
  global current_chat_session_history
277
  cleared_input, updated_gr_hist, status_txt = "", list(gr_hist_list), "Initializing..."
278
+ def_detect_out_md = gr.Markdown(visible=False) # Initialize with visible=False
279
+ def_fmt_out_txt = gr.Textbox(value="*Waiting...*", interactive=True) # Ensure interactive
280
  def_dl_btn = gr.DownloadButton(interactive=False, value=None, visible=False)
281
+
282
  if not user_msg_txt.strip():
283
  status_txt = "Error: Empty message."
284
  updated_gr_hist.append((user_msg_txt or "(Empty)", status_txt))
285
+ # Make sure to yield for all outputs in chat_outs
286
+ yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn)
287
+ return
288
+
289
  updated_gr_hist.append((user_msg_txt, "<i>Thinking...</i>"))
290
+ # Initial yield for chat update
291
  yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn)
292
+
293
  internal_hist = list(current_chat_session_history); internal_hist.append({"role": "user", "content": user_msg_txt})
294
  if len(internal_hist) > (MAX_HISTORY_TURNS * 2 + 1):
295
  if internal_hist[0]["role"] == "system" and len(internal_hist) > (MAX_HISTORY_TURNS * 2 + 1) : internal_hist = [internal_hist[0]] + internal_hist[-(MAX_HISTORY_TURNS * 2):]
296
  else: internal_hist = internal_hist[-(MAX_HISTORY_TURNS * 2):]
297
+
298
  final_bot_resp_acc, insights_used_parsed = "", []
299
  temp_dl_file_path = None
300
+
301
  try:
302
  processor_gen = process_user_interaction_gradio(user_input=user_msg_txt, provider_name=sel_prov_name, model_display_name=sel_model_disp_name, chat_history_for_prompt=internal_hist, custom_system_prompt=cust_sys_prompt.strip() or None, ui_api_key_override=ui_api_key.strip() if ui_api_key else None)
303
  curr_bot_disp_msg = ""
304
  for upd_type, upd_data in processor_gen:
305
  if upd_type == "status":
306
  status_txt = upd_data
307
+ if updated_gr_hist and updated_gr_hist[-1][0] == user_msg_txt:
308
+ updated_gr_hist[-1] = (user_msg_txt, f"{curr_bot_disp_msg} <i>{status_txt}</i>" if curr_bot_disp_msg else f"<i>{status_txt}</i>")
309
  elif upd_type == "response_chunk":
310
  curr_bot_disp_msg += upd_data
311
+ if updated_gr_hist and updated_gr_hist[-1][0] == user_msg_txt:
312
+ updated_gr_hist[-1] = (user_msg_txt, curr_bot_disp_msg)
313
  elif upd_type == "final_response_and_insights":
314
  final_bot_resp_acc, insights_used_parsed = upd_data["response"], upd_data["insights_used"]
315
  status_txt = "Response complete."
316
  if not curr_bot_disp_msg and final_bot_resp_acc : curr_bot_disp_msg = final_bot_resp_acc
317
+ if updated_gr_hist and updated_gr_hist[-1][0] == user_msg_txt:
318
+ updated_gr_hist[-1] = (user_msg_txt, curr_bot_disp_msg or "(No text)")
319
+
320
+ def_fmt_out_txt = gr.Textbox(value=curr_bot_disp_msg, interactive=True, show_copy_button=True) # Update
321
+
322
  if curr_bot_disp_msg and not curr_bot_disp_msg.startswith("Error:"):
323
  with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".md", encoding='utf-8') as tmpfile:
324
  tmpfile.write(curr_bot_disp_msg)
325
  temp_dl_file_path = tmpfile.name
326
  def_dl_btn = gr.DownloadButton(value=temp_dl_file_path, visible=True, interactive=True)
327
+ else:
328
+ def_dl_btn = gr.DownloadButton(interactive=False, value=None, visible=False)
329
+
330
+ insights_md_content = "### Insights Considered:\n" + ("\n".join([f"- **[{i.get('type','N/A')}|{i.get('score','N/A')}]** {i.get('text','N/A')[:100]}..." for i in insights_used_parsed[:3]]) if insights_used_parsed else "*None specific.*")
331
+ def_detect_out_md = gr.Markdown(value=insights_md_content, visible=True) # Update with value and visible
332
+
333
  yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn)
334
  if upd_type == "final_response_and_insights": break
335
  except Exception as e:
336
  logger.error(f"Chat handler error: {e}", exc_info=True); status_txt = f"Error: {str(e)[:100]}"
337
+ error_message_for_chat = f"Sorry, an error occurred: {str(e)[:100]}"
338
+ if updated_gr_hist and updated_gr_hist[-1][0] == user_msg_txt:
339
+ updated_gr_hist[-1] = (user_msg_txt, error_message_for_chat)
340
+ else:
341
+ updated_gr_hist.append((user_msg_txt, error_message_for_chat))
342
+ def_fmt_out_txt = gr.Textbox(value=error_message_for_chat, interactive=True) # Update
343
+ def_dl_btn = gr.DownloadButton(interactive=False, value=None, visible=False) # Reset
344
+ def_detect_out_md = gr.Markdown(value="*Error processing request.*", visible=True) # Update
345
+ yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn)
346
+ return
347
+
348
  if final_bot_resp_acc and not final_bot_resp_acc.startswith("Error:"):
349
  current_chat_session_history.extend([{"role": "user", "content": user_msg_txt}, {"role": "assistant", "content": final_bot_resp_acc}])
350
  hist_len_check = MAX_HISTORY_TURNS * 2
351
  if current_chat_session_history and current_chat_session_history[0]["role"] == "system": hist_len_check +=1
352
  if len(current_chat_session_history) > hist_len_check:
353
  current_chat_session_history = ([current_chat_session_history[0]] if current_chat_session_history[0]["role"] == "system" else []) + current_chat_session_history[-(MAX_HISTORY_TURNS * 2):]
354
+
355
  threading.Thread(target=deferred_learning_and_memory_task, args=(user_msg_txt, final_bot_resp_acc, sel_prov_name, sel_model_disp_name, insights_used_parsed, ui_api_key.strip() if ui_api_key else None), daemon=True).start()
356
  status_txt = "Response complete. Background learning initiated."
357
+ else:
358
+ status_txt = "Processing finished; no valid response or error occurred."
359
+ if final_bot_resp_acc.startswith("Error:"): # If there was an error string in final_bot_resp_acc
360
+ status_txt = final_bot_resp_acc
361
+ if updated_gr_hist and updated_gr_hist[-1][0] == user_msg_txt:
362
+ updated_gr_hist[-1] = (user_msg_txt, final_bot_resp_acc) # Update chatbot with error
363
+ def_fmt_out_txt = gr.Textbox(value=final_bot_resp_acc, interactive=True)
364
+ def_dl_btn = gr.DownloadButton(interactive=False, value=None, visible=False)
365
+
366
+ # Final yield for this handler
367
  yield (cleared_input, updated_gr_hist, status_txt, def_detect_out_md, def_fmt_out_txt, def_dl_btn)
368
+
369
  if temp_dl_file_path and os.path.exists(temp_dl_file_path):
370
  try: os.unlink(temp_dl_file_path)
371
  except Exception as e_unlink: logger.error(f"Error deleting temp download file {temp_dl_file_path}: {e_unlink}")
372
 
373
+ # --- UI Functions for Rules and Memories ---
374
  def ui_view_rules_action_fn(): return "\n\n---\n\n".join(get_all_rules_cached()) or "No rules found."
375
+
376
+ def ui_download_rules_action_fn():
377
+ rules_content = "\n\n---\n\n".join(get_all_rules_cached())
378
+ if not rules_content.strip():
379
+ gr.Warning("No rules to download.")
380
+ # Return an update to clear any previous file and disable button
381
+ return gr.DownloadButton(value=None, interactive=False, label="No Rules")
382
+
383
+ try:
384
+ with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".txt", encoding='utf-8') as tmpfile:
385
+ tmpfile.write(rules_content)
386
+ # Gradio's DownloadButton click handler should return the filepath string
387
+ return tmpfile.name
388
+ except Exception as e:
389
+ logger.error(f"Error creating rules download file: {e}")
390
+ gr.Error(f"Failed to prepare rules for download: {e}")
391
+ return gr.DownloadButton(value=None, interactive=False, label="Error")
392
+
393
  def ui_upload_rules_action_fn(uploaded_file_obj, progress=gr.Progress()):
394
  if not uploaded_file_obj: return "No file provided for rules upload."
395
  try:
 
413
  msg = f"Rules Upload: Processed {total_to_process}. Added: {added_count}, Skipped (duplicates): {skipped_count}, Errors/Invalid: {error_count}."
414
  logger.info(msg); return msg
415
 
416
+ def ui_view_memories_action_fn(): return get_all_memories_cached() or [] # Returns list of dicts
417
+
418
+ def ui_download_memories_action_fn():
419
+ memories = get_all_memories_cached() # list of dicts
420
+ if not memories:
421
+ gr.Warning("No memories to download.")
422
+ return gr.DownloadButton(value=None, interactive=False, label="No Memories")
423
+
424
+ jsonl_content = ""
425
+ for mem_dict in memories:
426
+ try:
427
+ jsonl_content += json.dumps(mem_dict) + "\n"
428
+ except Exception as e:
429
+ logger.error(f"Error serializing memory for download: {mem_dict}, Error: {e}")
430
+ # Skip problematic memory for download
431
+
432
+ if not jsonl_content.strip():
433
+ gr.Warning("No valid memories to serialize for download.")
434
+ return gr.DownloadButton(value=None, interactive=False, label="No Data")
435
+
436
+ try:
437
+ with tempfile.NamedTemporaryFile(mode="w", delete=False, suffix=".jsonl", encoding='utf-8') as tmpfile:
438
+ tmpfile.write(jsonl_content)
439
+ return tmpfile.name # Return filepath string
440
+ except Exception as e:
441
+ logger.error(f"Error creating memories download file: {e}")
442
+ gr.Error(f"Failed to prepare memories for download: {e}")
443
+ return gr.DownloadButton(value=None, interactive=False, label="Error")
444
+
445
  def ui_upload_memories_action_fn(uploaded_file_obj, progress=gr.Progress()):
446
  if not uploaded_file_obj: return "No file provided for memories upload."
447
  try:
 
460
  except: format_error_count += 1
461
  if not memory_objects_to_process and format_error_count == 0: return "No valid memory objects found."
462
  total_to_process = len(memory_objects_to_process)
463
+ if total_to_process == 0: return "No memory objects to process (after parsing)."
464
  progress(0, desc="Starting memories upload...")
465
  for idx, mem_data in enumerate(memory_objects_to_process):
466
  if isinstance(mem_data, dict) and all(k in mem_data for k in ["user_input", "bot_response", "metrics"]):
 
472
  msg = f"Memories Upload: Processed {total_to_process}. Added: {added_count}, Format Errors: {format_error_count}, Save Errors: {save_error_count}."
473
  logger.info(msg); return msg
474
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
475
 
476
  with gr.Blocks(
477
+ theme=gr.themes.Soft(),
478
  css="""
479
  .gr-button { margin: 5px; }
480
  .gr-textbox, .gr-text-area, .gr-dropdown { border-radius: 8px; }
 
484
  .status-text { font-size: 0.9em; color: #555; }
485
  """
486
  ) as demo:
 
487
  gr.Markdown(
488
  """
489
  # πŸ€– AI Research Agent
 
492
  elem_classes=["header"]
493
  )
494
 
 
495
  is_sqlite = MEMORY_STORAGE_BACKEND == "SQLITE"
496
  is_hf_dataset = MEMORY_STORAGE_BACKEND == "HF_DATASET"
497
 
498
  with gr.Row(variant="compact"):
499
  agent_stat_tb = gr.Textbox(
500
+ label="Agent Status", value="Initializing systems...", interactive=False,
501
+ elem_classes=["status-text"], scale=4
 
 
 
502
  )
503
  with gr.Column(scale=1, min_width=150):
504
  memory_backend_info_tb = gr.Textbox(
505
+ label="Memory Backend", value=MEMORY_STORAGE_BACKEND, interactive=False,
 
 
506
  elem_classes=["status-text"]
507
  )
508
  sqlite_path_display = gr.Textbox(
509
+ label="SQLite Path", value=MEMORY_SQLITE_PATH, interactive=False,
510
+ visible=is_sqlite, elem_classes=["status-text"]
 
 
 
511
  )
512
  hf_repos_display = gr.Textbox(
513
+ label="HF Repos", value=f"M: {MEMORY_HF_MEM_REPO}, R: {MEMORY_HF_RULES_REPO}",
514
+ interactive=False, visible=is_hf_dataset, elem_classes=["status-text"]
 
 
 
515
  )
516
 
517
  with gr.Row():
 
518
  with gr.Sidebar():
519
  gr.Markdown("## βš™οΈ Configuration")
520
  with gr.Group():
521
  gr.Markdown("### AI Model Settings")
522
  api_key_tb = gr.Textbox(
523
+ label="AI Provider API Key (Override)", type="password", placeholder="Uses .env if blank"
 
 
524
  )
525
  prov_sel_dd = gr.Dropdown(
526
+ label="AI Provider", choices=get_available_providers(),
527
+ value=get_available_providers()[0] if get_available_providers() else None, interactive=True
 
 
528
  )
529
  model_sel_dd = gr.Dropdown(
530
  label="AI Model",
 
535
  with gr.Group():
536
  gr.Markdown("### System Prompt")
537
  sys_prompt_tb = gr.Textbox(
538
+ label="System Prompt Base", lines=8, value=DEFAULT_SYSTEM_PROMPT, interactive=True
 
 
 
539
  )
540
  if MEMORY_STORAGE_BACKEND == "RAM":
541
  save_faiss_sidebar_btn = gr.Button("Save FAISS Indices", variant="secondary")
542
 
 
543
  with gr.Column(scale=3):
544
  with gr.Tabs():
545
  with gr.TabItem("πŸ’¬ Chat & Research"):
546
  with gr.Group():
547
  gr.Markdown("### AI Chat Interface")
548
  main_chat_disp = gr.Chatbot(
549
+ label=None, height=400, bubble_full_width=False,
 
 
550
  avatar_images=(None, "https://raw.githubusercontent.com/huggingface/brand-assets/main/hf-logo-with-title.png"),
551
+ show_copy_button=True, render_markdown=True, sanitize_html=True
 
 
552
  )
553
  with gr.Row(variant="compact"):
554
  user_msg_tb = gr.Textbox(
555
+ show_label=False, placeholder="Ask your research question...",
556
+ scale=7, lines=1, max_lines=3
 
 
 
557
  )
558
  send_btn = gr.Button("Send", variant="primary", scale=1, min_width=100)
559
  with gr.Accordion("πŸ“ Detailed Response", open=False):
560
  fmt_report_tb = gr.Textbox(
561
+ label="Full AI Response", lines=8, interactive=True, show_copy_button=True
 
 
 
562
  )
563
  dl_report_btn = gr.DownloadButton(
564
+ "Download Report", value=None, interactive=False, visible=False # Ensure value is None initially
 
 
565
  )
566
+ detect_out_md = gr.Markdown(visible=False) # Ensure visible is False initially
567
 
568
  with gr.TabItem("🧠 Knowledge Base"):
569
  with gr.Row(equal_height=True):
570
  with gr.Column():
571
  gr.Markdown("### πŸ“œ Rules Management")
572
  rules_disp_ta = gr.TextArea(
573
+ label=None, lines=10, placeholder="View or edit rules...", interactive=True
 
 
 
574
  )
575
  with gr.Row(variant="compact"):
576
  view_rules_btn = gr.Button("πŸ”„ Load Rules")
577
  save_edited_rules_btn = gr.Button("πŸ’Ύ Save Rules", variant="primary")
578
+ with gr.Row(variant="compact"):
579
+ dl_rules_btn = gr.DownloadButton("⬇️ Download Rules", value=None) # Add download button
580
  clear_rules_btn = gr.Button("πŸ—‘οΈ Clear Rules", variant="stop")
581
  upload_rules_fobj = gr.File(
582
+ label="Upload Rules File (.txt, .jsonl)",
583
  file_types=[".txt", ".jsonl"]
584
  )
585
  rules_stat_tb = gr.Textbox(
586
+ label="Rules Status", interactive=False, lines=1, elem_classes=["status-text"]
 
 
 
587
  )
588
 
589
  with gr.Column():
590
  gr.Markdown("### πŸ“š Memories Management")
591
  mems_disp_json = gr.JSON(
592
+ label=None, value=[] # Initialize with empty list
 
593
  )
594
  with gr.Row(variant="compact"):
595
  view_mems_btn = gr.Button("πŸ”„ Load Memories")
596
+ dl_mems_btn = gr.DownloadButton("⬇️ Download Memories", value=None) # Add download button
597
+ with gr.Row(variant="compact"):
598
  clear_mems_btn = gr.Button("πŸ—‘οΈ Clear Memories", variant="stop")
599
  upload_mems_fobj = gr.File(
600
+ label="Upload Memories File (.jsonl)",
601
+ file_types=[".jsonl", ".json"] # Allow .json for single list
602
  )
603
  mems_stat_tb = gr.Textbox(
604
+ label="Memories Status", interactive=False, lines=1, elem_classes=["status-text"]
 
 
 
605
  )
606
 
 
607
  def dyn_upd_model_dd(sel_prov_dyn: str):
608
  models_dyn = get_model_display_names_for_provider(sel_prov_dyn)
609
  def_model_dyn = get_default_model_display_name_for_provider(sel_prov_dyn)
 
613
 
614
    # --- Chat & rules event wiring (inside the gr.Blocks context) ---
    # Components fed into the chat handler, and the components it updates.
    # Order matters: it must match handle_gradio_chat_submit's parameters and
    # its yielded tuple, respectively.
    chat_ins = [user_msg_tb, main_chat_disp, prov_sel_dd, model_sel_dd, api_key_tb, sys_prompt_tb]
    chat_outs = [user_msg_tb, main_chat_disp, agent_stat_tb, detect_out_md, fmt_report_tb, dl_report_btn]

    # Chat submission events: the Send button and pressing Enter in the
    # message textbox both route through the same handler with the same
    # inputs/outputs, so the wiring is shared via one kwargs dict.
    chat_event_args = {"fn": handle_gradio_chat_submit, "inputs": chat_ins, "outputs": chat_outs}

    send_btn_click_event = send_btn.click(**chat_event_args)
    user_msg_submit_event = user_msg_tb.submit(**chat_event_args)

    # Chain UI refreshes for rules and memories after chat interaction, so any
    # rules/memories the agent learned during the turn show up immediately in
    # the Knowledge Base tab without a manual reload.
    for event in [send_btn_click_event, user_msg_submit_event]:
        event.then(fn=ui_view_rules_action_fn, inputs=None, outputs=rules_disp_ta)
        event.then(fn=ui_view_memories_action_fn, inputs=None, outputs=mems_disp_json)

    # Rules Management events.
    view_rules_btn.click(fn=ui_view_rules_action_fn, outputs=rules_disp_ta)
    # The download button is its own output target: the handler returns a file
    # path for Gradio to serve back through the same component.
    dl_rules_btn.click(fn=ui_download_rules_action_fn, inputs=None, outputs=dl_rules_btn)
632
+
633
 
634
  def save_edited_rules_action_fn(edited_rules_text: str, progress=gr.Progress()):
635
  if not edited_rules_text.strip():
 
652
  skipped += 1
653
  else:
654
  errors += 1
655
+ progress((idx + 1) / total, desc=f"Processed {idx+1}/{total} rules...")
656
  return f"Editor Save: Added: {added}, Skipped (duplicates): {skipped}, Errors/Invalid: {errors}."
657
 
658
  save_edited_rules_btn.click(
 
670
  ).then(fn=ui_view_rules_action_fn, outputs=rules_disp_ta)
671
 
672
    # Destructive clear of all rules. The lambda maps the backend call's
    # boolean result to a user-facing status string, then the rules view is
    # reloaded so the (now empty) state is visible.
    clear_rules_btn.click(
        fn=lambda: ("All rules cleared." if clear_all_rules_data_backend() else "Error clearing rules."),
        outputs=rules_stat_tb
    ).then(fn=ui_view_rules_action_fn, outputs=rules_disp_ta)
676
 
677
    # Memories Management events.
    view_mems_btn.click(fn=ui_view_memories_action_fn, outputs=mems_disp_json)
    # As with the rules download, the button doubles as the output component so
    # the handler can hand back a freshly generated file to download.
    dl_mems_btn.click(fn=ui_download_memories_action_fn, inputs=None, outputs=dl_mems_btn)
680
+
681
 
682
  upload_mems_fobj.upload(
683
  fn=ui_upload_memories_action_fn,
 
687
  ).then(fn=ui_view_memories_action_fn, outputs=mems_disp_json)
688
 
689
  clear_mems_btn.click(
690
+ fn=lambda: ("All memories cleared." if clear_all_memory_data_backend() else "Error clearing memories."),
691
  outputs=mems_stat_tb
692
  ).then(fn=ui_view_memories_action_fn, outputs=mems_disp_json)
693
 
694
    # Save FAISS for RAM backend: only the RAM backend keeps indices purely in
    # memory, so a manual persist button is exposed (and wired) only when it is
    # active. The locals() check guards against the button never having been
    # created in the sidebar above.
    if MEMORY_STORAGE_BACKEND == "RAM" and 'save_faiss_sidebar_btn' in locals():
        def save_faiss_action_with_feedback_sidebar_fn():
            """Persist in-memory FAISS indices to disk and toast the user.

            NOTE(review): the toast says "Attempted" because
            save_faiss_indices_to_disk() returns no success indicator here.
            """
            save_faiss_indices_to_disk()
            gr.Info("Attempted to save FAISS indices to disk.")
        save_faiss_sidebar_btn.click(fn=save_faiss_action_with_feedback_sidebar_fn, inputs=None, outputs=None)
700
+
701
  def app_load_fn():
702
  initialize_memory_system()
703
  logger.info("App loaded. Memory system initialized.")
 
709
  demo.load(fn=app_load_fn, inputs=None, outputs=[agent_stat_tb, rules_disp_ta, mems_disp_json])
710
 
711
  if __name__ == "__main__":
712
+ logger.info(f"Starting Gradio AI Research Mega Agent (v5.9 - UI Refresh & Download/Upload Enhancements, Memory: {MEMORY_STORAGE_BACKEND})...")
713
  app_port = int(os.getenv("GRADIO_PORT", 7860))
714
  app_server = os.getenv("GRADIO_SERVER_NAME", "127.0.0.1")
715
  app_debug = os.getenv("GRADIO_DEBUG", "False").lower() == "true"