Spaces:
Sleeping
Sleeping
Meetpatel006
committed on
Update app.py
Browse filesThe periodic ping to the MCP server will start when the Gradio app loads.
app.py
CHANGED
@@ -52,10 +52,21 @@ ping_task = None
|
|
52 |
def start_ping_task():
|
53 |
"""Start the ping task when the Gradio app launches"""
|
54 |
global ping_task
|
55 |
-
|
56 |
-
|
57 |
-
|
58 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
59 |
|
60 |
|
61 |
|
@@ -324,37 +335,42 @@ def sync_load_concept_graph(concept_id):
|
|
324 |
return None, {"error": str(e)}, []
|
325 |
|
326 |
# Create Gradio interface
|
327 |
-
|
328 |
-
# Start the ping task when the app loads
|
329 |
-
demo.load(start_ping_task)
|
330 |
-
|
331 |
-
gr.Markdown("# π TutorX Educational AI Platform")
|
332 |
-
gr.Markdown("""
|
333 |
-
An adaptive, multi-modal, and collaborative AI tutoring platform built with MCP.
|
334 |
-
|
335 |
-
This interface demonstrates the functionality of the TutorX MCP server using SSE connections.
|
336 |
-
""")
|
337 |
-
|
338 |
# Set a default student ID for the demo
|
339 |
student_id = "student_12345"
|
340 |
|
341 |
-
with gr.
|
342 |
-
#
|
343 |
-
|
344 |
-
|
345 |
-
|
346 |
-
|
347 |
-
|
348 |
-
|
349 |
-
|
350 |
-
|
351 |
-
|
352 |
-
|
353 |
-
|
354 |
-
|
355 |
-
|
356 |
-
|
357 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
358 |
load_btn = gr.Button("Load Graph", variant="primary", scale=1)
|
359 |
|
360 |
# Concept details
|
@@ -717,7 +733,10 @@ with gr.Blocks(title="TutorX Educational AI", theme=gr.themes.Soft()) as demo:
|
|
717 |
inputs=[submission_input, reference_input],
|
718 |
outputs=[plagiarism_output]
|
719 |
)
|
|
|
|
|
720 |
|
721 |
# Launch the interface
|
722 |
if __name__ == "__main__":
|
|
|
723 |
demo.queue().launch(server_name="0.0.0.0", server_port=7860)
|
|
|
52 |
def start_ping_task():
|
53 |
"""Start the ping task when the Gradio app launches"""
|
54 |
global ping_task
|
55 |
+
try:
|
56 |
+
if ping_task is None:
|
57 |
+
loop = asyncio.get_event_loop()
|
58 |
+
if loop.is_running():
|
59 |
+
ping_task = loop.create_task(start_periodic_ping())
|
60 |
+
print("Started periodic ping task")
|
61 |
+
else:
|
62 |
+
print("Event loop is not running, will start ping task later")
|
63 |
+
except Exception as e:
|
64 |
+
print(f"Error starting ping task: {e}")
|
65 |
+
|
66 |
+
# Only run this code when the module is executed directly
|
67 |
+
if __name__ == "__main__" and not hasattr(gr, 'blocks'):
|
68 |
+
# This ensures we don't start the task when imported by Gradio
|
69 |
+
start_ping_task()
|
70 |
|
71 |
|
72 |
|
|
|
335 |
return None, {"error": str(e)}, []
|
336 |
|
337 |
# Create Gradio interface
|
338 |
+
def create_gradio_interface():
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
339 |
# Set a default student ID for the demo
|
340 |
student_id = "student_12345"
|
341 |
|
342 |
+
with gr.Blocks(title="TutorX Educational AI", theme=gr.themes.Soft()) as demo:
|
343 |
+
# Start the ping task when the app loads
|
344 |
+
demo.load(
|
345 |
+
fn=start_ping_task,
|
346 |
+
inputs=None,
|
347 |
+
outputs=None,
|
348 |
+
queue=False
|
349 |
+
)
|
350 |
+
|
351 |
+
# Interface content
|
352 |
+
gr.Markdown("# π TutorX Educational AI Platform")
|
353 |
+
gr.Markdown("""
|
354 |
+
An adaptive, multi-modal, and collaborative AI tutoring platform built with MCP.
|
355 |
+
""")
|
356 |
+
|
357 |
+
with gr.Tabs() as tabs:
|
358 |
+
# Tab 1: Core Features
|
359 |
+
with gr.Tab("Core Features"):
|
360 |
+
with gr.Blocks() as concept_graph_tab:
|
361 |
+
gr.Markdown("## Concept Graph Visualization")
|
362 |
+
gr.Markdown("Explore relationships between educational concepts through an interactive graph visualization.")
|
363 |
+
|
364 |
+
with gr.Row():
|
365 |
+
# Left panel for controls and details
|
366 |
+
with gr.Column(scale=3):
|
367 |
+
with gr.Row():
|
368 |
+
concept_input = gr.Textbox(
|
369 |
+
label="Enter Concept",
|
370 |
+
placeholder="e.g., machine_learning, calculus, quantum_physics",
|
371 |
+
value="machine_learning",
|
372 |
+
scale=4
|
373 |
+
)
|
374 |
load_btn = gr.Button("Load Graph", variant="primary", scale=1)
|
375 |
|
376 |
# Concept details
|
|
|
733 |
inputs=[submission_input, reference_input],
|
734 |
outputs=[plagiarism_output]
|
735 |
)
|
736 |
+
|
737 |
+
return demo
|
738 |
|
739 |
# Launch the interface
|
740 |
if __name__ == "__main__":
|
741 |
+
demo = create_gradio_interface()
|
742 |
demo.queue().launch(server_name="0.0.0.0", server_port=7860)
|