Update app.py
app.py
CHANGED
@@ -327,7 +327,8 @@ def create_ui() -> gr.Blocks:
         """)
         chatbot = gr.Chatbot(
             label="Chat with Assistant",
-            height=400
+            height=400,
+            type="messages"
         )
         msg = gr.Textbox(
             label="Message",
@@ -393,12 +394,47 @@ def create_ui() -> gr.Blocks:
             outputs=[start_page, input_page, analysis_page, chatbot_page, results_page, help_page, comparison_page, history_page]
         )
 
+        # Modified event handlers with loading states
+        def process_repo_input_with_loading(text: str, state: AppState) -> Tuple[pd.DataFrame, gr.update]:
+            """Process repo input with loading state."""
+            return process_repo_input(text, state), gr.update(visible=False)
+
+        def keyword_search_with_loading(keyword: str, state: AppState) -> Tuple[pd.DataFrame, gr.update]:
+            """Search keywords with loading state."""
+            return keyword_search_and_update(keyword, state), gr.update(visible=False)
+
+        def analyze_with_loading(state: AppState) -> Tuple[str, str, pd.DataFrame, gr.update]:
+            """Analyze with loading state."""
+            return *show_combined_repo_and_llm(state), gr.update(visible=False)
+
+        def send_message_with_loading(user_message: str, history: List[Dict[str, str]], state: AppState) -> Tuple[List[Dict[str, str]], str, gr.update]:
+            """Send message with loading state."""
+            if not user_message:
+                return history, "", gr.update(visible=False)
+            history.append({"role": "user", "content": user_message})
+            response = chat_with_user(user_message, history, CHATBOT_SYSTEM_PROMPT)
+            history.append({"role": "assistant", "content": response})
+            return history, "", gr.update(visible=False)
+
+        def end_chat_with_loading(history: List[Dict[str, str]], state: AppState) -> Tuple[List[str], gr.update, gr.update]:
+            """End chat and extract keywords."""
+            if not history:
+                return [], gr.update(visible=True), gr.update(visible=False)
+            keywords = extract_keywords_from_conversation(history)
+            state.generated_keywords = keywords
+            return keywords, gr.update(visible=True), gr.update(visible=False)
+
+        def export_with_loading(df: pd.DataFrame) -> Tuple[str, gr.update]:
+            """Export with loading state."""
+            return export_results(df), gr.update(visible=False)
+
+        # Update event handlers with loading states
         submit_btn.click(
             fn=lambda: gr.update(visible=True),
             inputs=[],
             outputs=[submit_loading]
         ).then(
-            fn=
+            fn=process_repo_input_with_loading,
             inputs=[repo_id_input, state],
             outputs=[df_output, submit_loading]
         )
@@ -408,50 +444,21 @@ def create_ui() -> gr.Blocks:
             inputs=[],
             outputs=[search_loading]
         ).then(
-            fn=
+            fn=keyword_search_with_loading,
             inputs=[keyword_input, state],
             outputs=[df_output, search_loading]
         )
 
-        analyze_btn.click(
-            fn=lambda: navigate_to("analysis"),
-            inputs=[],
-            outputs=[start_page, input_page, analysis_page, chatbot_page, results_page]
-        )
-
         next_btn.click(
             fn=lambda: gr.update(visible=True),
             inputs=[],
             outputs=[next_loading]
         ).then(
-            fn=
+            fn=analyze_with_loading,
             inputs=[state],
             outputs=[content_output, summary_output, df_output, next_loading]
         )
 
-        finish_btn.click(
-            fn=lambda: navigate_to("results"),
-            inputs=[],
-            outputs=[start_page, input_page, analysis_page, chatbot_page, results_page]
-        )
-
-        def user_send(user_message: str, history: List[Tuple[str, str]], state: AppState) -> Tuple[List[Tuple[str, str]], str]:
-            """Handle user message in chatbot."""
-            if not user_message:
-                return history, ""
-            history.append((user_message, ""))
-            response = chat_with_user(user_message, history, CHATBOT_SYSTEM_PROMPT)
-            history[-1] = (user_message, response)
-            return history, ""
-
-        def end_chat(history: List[Tuple[str, str]], state: AppState) -> Tuple[List[str], List[gr.update]]:
-            """End chat and extract keywords."""
-            if not history:
-                return [], navigate_to("results")
-            keywords = extract_keywords_from_conversation(history)
-            state.generated_keywords = keywords
-            return keywords, navigate_to("results")
-
         send_btn.click(
             fn=lambda: gr.update(visible=True),
             inputs=[],
@@ -469,7 +476,17 @@ def create_ui() -> gr.Blocks:
         ).then(
             fn=end_chat_with_loading,
             inputs=[chatbot, state],
-            outputs=[gr.Textbox(label="Extracted Keywords"),
+            outputs=[gr.Textbox(label="Extracted Keywords"), results_page, end_chat_loading]
+        )
+
+        export_btn.click(
+            fn=lambda: gr.update(visible=True),
+            inputs=[],
+            outputs=[export_loading]
+        ).then(
+            fn=export_with_loading,
+            inputs=[results_df],
+            outputs=[gr.Textbox(label="Export Status"), export_loading]
         )
 
         restart_btn.click(
@@ -514,16 +531,6 @@ def create_ui() -> gr.Blocks:
             logger.error(f"Error saving to history: {e}")
 
         # Add new event handlers for new features
-        export_btn.click(
-            fn=lambda: gr.update(visible=True),
-            inputs=[],
-            outputs=[export_loading]
-        ).then(
-            fn=export_with_loading,
-            inputs=[results_df],
-            outputs=[gr.Textbox(label="Export Status"), export_loading]
-        )
-
         history_btn.click(
             fn=lambda: (load_history(), navigate_to("history")),
             inputs=[],
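
The core of this change is the loading-state pattern: each button's .click() now fires a quick lambda that only reveals a loading indicator, and the chained .then() step runs the real work and hides the indicator again by returning gr.update(visible=False) as an extra output. Below is a minimal, self-contained sketch of that pattern, not the app's code; it assumes a Gradio 4.x-style API and uses hypothetical names (inp, out, loading, run_btn, slow_work) in place of the app's own components and handlers.

import time

import gradio as gr


def slow_work(text: str):
    """Stand-in for the app's handlers: do the work, then hide the spinner."""
    time.sleep(2)  # simulate a long-running call
    return f"Processed: {text}", gr.update(visible=False)


with gr.Blocks() as demo:
    inp = gr.Textbox(label="Input")
    out = gr.Textbox(label="Result")
    loading = gr.Markdown("Working...", visible=False)
    run_btn = gr.Button("Run")

    run_btn.click(
        fn=lambda: gr.update(visible=True),  # step 1: show the indicator immediately
        inputs=[],
        outputs=[loading],
    ).then(
        fn=slow_work,                        # step 2: run the work, hide the indicator
        inputs=[inp],
        outputs=[out, loading],
    )

if __name__ == "__main__":
    demo.launch()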
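The other visible change is the chat history format: gr.Chatbot now gets type="messages", and the send handler appends {"role": ..., "content": ...} dicts instead of (user, assistant) tuples, which is the message schema Gradio expects in that mode. The snippet below is a minimal sketch of that handler shape, not the app's code; chat_with_user is replaced here by an echo stub, since the real LLM call is outside this diff.

from typing import Dict, List


def chat_with_user(message: str, history: List[Dict[str, str]], system_prompt: str) -> str:
    """Placeholder for the app's LLM call; echoes the message for illustration."""
    return f"You said: {message}"


def send_message(user_message: str, history: List[Dict[str, str]]) -> List[Dict[str, str]]:
    """Append the user turn, then the assistant turn, in messages format."""
    if not user_message:
        return history
    history.append({"role": "user", "content": user_message})
    reply = chat_with_user(user_message, history, system_prompt="You are a helpful assistant.")
    history.append({"role": "assistant", "content": reply})
    return history


if __name__ == "__main__":
    hist: List[Dict[str, str]] = []
    send_message("hello", hist)
    print(hist)  # [{'role': 'user', ...}, {'role': 'assistant', ...}]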