Meet Patel committed on
Commit ff522ab · 1 Parent(s): 66fc7d8

Refactor: Update Gradio app initialization to use create_gradio_interface function

Files changed (2)
  1. app.py +426 -345
  2. run.py +4 -3
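
The change replaces the module-level `demo` Blocks object in `app.py` (built at import time) with a `create_gradio_interface()` factory that builds and returns the app, and `run.py` now imports the factory and constructs `demo` itself. A minimal sketch of the pattern, with placeholder content rather than the real TutorX layout:

```python
import gradio as gr

def create_gradio_interface() -> gr.Blocks:
    """Build the UI on demand instead of at import time."""
    with gr.Blocks(title="TutorX Educational AI", theme=gr.themes.Soft()) as demo:
        gr.Markdown("# TutorX")  # placeholder; the real app defines tabs, inputs, and handlers here
    return demo

if __name__ == "__main__":
    demo = create_gradio_interface()
    demo.queue().launch(server_name="0.0.0.0", server_port=7860)
```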
app.py CHANGED
@@ -6,15 +6,11 @@ import os
6
  import json
7
  import asyncio
8
  import gradio as gr
9
- from typing import Optional, Dict, Any, List, Union, Tuple, Callable
10
  import requests
11
- import tempfile
12
- import base64
13
- import re
14
  import networkx as nx
15
  import matplotlib
16
  import matplotlib.pyplot as plt
17
- import time
18
  from datetime import datetime
19
 
20
  # Set matplotlib to use 'Agg' backend to avoid GUI issues in Gradio
@@ -23,7 +19,6 @@ matplotlib.use('Agg')
23
  # Import MCP client components
24
  from mcp.client.sse import sse_client
25
  from mcp.client.session import ClientSession
26
- from mcp.types import TextContent, CallToolResult
27
 
28
  # Server configuration
29
  SERVER_URL = "https://tutorx-mcp.onrender.com/sse" # Ensure this is the SSE endpoint
@@ -49,13 +44,67 @@ async def start_periodic_ping(interval_minutes: int = 10) -> None:
49
  # Store the ping task reference
50
  ping_task = None
51
 
52
  def start_ping_task():
53
  """Start the ping task when the Gradio app launches"""
54
  global ping_task
55
- if ping_task is None:
56
- loop = asyncio.get_event_loop()
57
- ping_task = loop.create_task(start_periodic_ping())
58
- print("Started periodic ping task")
59
 
60
 
61
 
@@ -323,28 +372,206 @@ def sync_load_concept_graph(concept_id):
323
  except Exception as e:
324
  return None, {"error": str(e)}, []
325
 
326
- # Create Gradio interface
327
- with gr.Blocks(title="TutorX Educational AI", theme=gr.themes.Soft()) as demo:
328
- # Start the ping task when the app loads
329
- demo.load(start_ping_task)
330
 
331
- gr.Markdown("# 📚 TutorX Educational AI Platform")
332
- gr.Markdown("""
333
- An adaptive, multi-modal, and collaborative AI tutoring platform built with MCP.
334
-
335
- This interface demonstrates the functionality of the TutorX MCP server using SSE connections.
336
- """)
337
-
338
  # Set a default student ID for the demo
339
  student_id = "student_12345"
340
-
341
- with gr.Tabs() as tabs:
342
- # Tab 1: Core Features
343
- with gr.Tab("Core Features"):
344
- with gr.Blocks() as concept_graph_tab:
345
- gr.Markdown("## Concept Graph Visualization")
346
- gr.Markdown("Explore relationships between educational concepts through an interactive graph visualization.")
347
-
348
  with gr.Row():
349
  # Left panel for controls and details
350
  with gr.Column(scale=3):
@@ -355,15 +582,15 @@ with gr.Blocks(title="TutorX Educational AI", theme=gr.themes.Soft()) as demo:
355
  value="machine_learning",
356
  scale=4
357
  )
358
- load_btn = gr.Button("Load Graph", variant="primary", scale=1)
359
-
360
  # Concept details
361
  with gr.Accordion("Concept Details", open=True):
362
  concept_details = gr.JSON(
363
  label=None,
364
  show_label=False
365
  )
366
-
367
  # Related concepts and prerequisites
368
  with gr.Accordion("Related Concepts & Prerequisites", open=True):
369
  related_concepts = gr.Dataframe(
@@ -371,30 +598,30 @@ with gr.Blocks(title="TutorX Educational AI", theme=gr.themes.Soft()) as demo:
371
  datatype=["str", "str", "str"],
372
  interactive=False,
373
  wrap=True,
374
- # max_height=300, # Fixed height with scroll in Gradio 5.x
375
- # overflow_row_behaviour="paginate"
376
  )
377
-
378
- # Graph visualization
379
  with gr.Column(scale=7):
380
- graph_plot = gr.Plot(
381
- label="Concept Graph",
382
- show_label=True,
383
- container=True
384
- )
385
-
 
386
  # Event handlers
387
  load_btn.click(
388
  fn=sync_load_concept_graph,
389
  inputs=[concept_input],
390
  outputs=[graph_plot, concept_details, related_concepts]
391
  )
392
-
393
  # Load initial graph
394
  demo.load(
395
  fn=lambda: sync_load_concept_graph("machine_learning"),
396
  outputs=[graph_plot, concept_details, related_concepts]
397
  )
 
398
  # Help text and examples
399
  with gr.Row():
400
  gr.Markdown("""
@@ -404,320 +631,174 @@ with gr.Blocks(title="TutorX Educational AI", theme=gr.themes.Soft()) as demo:
404
  - `calculus`
405
  - `quantum_physics`
406
  """)
407
-
408
- # Error display (leave in UI, but not wired up)
409
- error_output = gr.Textbox(
410
- label="Error Messages",
411
- visible=False,
412
- interactive=False
413
- )
414
-
415
- gr.Markdown("## Assessment Generation")
416
- with gr.Row():
417
- with gr.Column():
418
- concept_input = gr.Textbox(
419
- label="Enter Concept",
420
- placeholder="e.g., Linear Equations, Photosynthesis, World War II",
421
- lines=2
422
- )
423
- with gr.Row():
424
- diff_input = gr.Slider(
425
- minimum=1,
426
- maximum=5,
427
- value=2,
428
- step=1,
429
- label="Difficulty Level",
430
- interactive=True
431
  )
432
- gen_quiz_btn = gr.Button("Generate Quiz", variant="primary")
433
-
434
- with gr.Column():
435
- quiz_output = gr.JSON(label="Generated Quiz")
436
 
437
- async def on_generate_quiz(concept, difficulty):
438
- try:
439
- if not concept or not str(concept).strip():
440
- return {"error": "Please enter a concept"}
441
- try:
442
- difficulty = int(float(difficulty))
443
- difficulty = max(1, min(5, difficulty))
444
- except (ValueError, TypeError):
445
- difficulty = 3
446
- if difficulty <= 2:
447
- difficulty_str = "easy"
448
- elif difficulty == 3:
449
- difficulty_str = "medium"
450
- else:
451
- difficulty_str = "hard"
452
- async with sse_client(SERVER_URL) as (sse, write):
453
- async with ClientSession(sse, write) as session:
454
- await session.initialize()
455
- response = await session.call_tool("generate_quiz_tool", {"concept": concept.strip(), "difficulty": difficulty_str})
456
- if hasattr(response, 'content') and isinstance(response.content, list):
457
- for item in response.content:
458
- if hasattr(item, 'text') and item.text:
459
- try:
460
- quiz_data = json.loads(item.text)
461
- return quiz_data
462
- except Exception:
463
- return {"raw_pretty": json.dumps(item.text, indent=2)}
464
- if isinstance(response, dict):
465
- return response
466
- if isinstance(response, str):
467
- try:
468
- return json.loads(response)
469
- except Exception:
470
- return {"raw_pretty": json.dumps(response, indent=2)}
471
- return {"raw_pretty": json.dumps(str(response), indent=2)}
472
- except Exception as e:
473
- import traceback
474
- return {
475
- "error": f"Error generating quiz: {str(e)}\n\n{traceback.format_exc()}"
476
- }
477
-
478
- gen_quiz_btn.click(
479
- fn=on_generate_quiz,
480
- inputs=[concept_input, diff_input],
481
- outputs=[quiz_output],
482
- api_name="generate_quiz"
483
- )
484
 
485
- # Tab 2: Advanced Features
486
- with gr.Tab("Advanced Features"):
487
- gr.Markdown("## Lesson Generation")
488
 
489
- with gr.Row():
490
- with gr.Column():
491
- topic_input = gr.Textbox(label="Lesson Topic", value="Solving Quadratic Equations")
492
- grade_input = gr.Slider(minimum=1, maximum=12, value=9, step=1, label="Grade Level")
493
- duration_input = gr.Slider(minimum=15, maximum=90, value=45, step=5, label="Duration (minutes)")
494
- gen_lesson_btn = gr.Button("Generate Lesson Plan")
495
-
496
- with gr.Column():
497
- lesson_output = gr.JSON(label="Lesson Plan")
498
- async def generate_lesson_async(topic, grade, duration):
499
- async with sse_client(SERVER_URL) as (sse, write):
500
- async with ClientSession(sse, write) as session:
501
- await session.initialize()
502
- response = await session.call_tool("generate_lesson_tool", {"topic": topic, "grade_level": grade, "duration_minutes": duration})
503
- if hasattr(response, 'content') and isinstance(response.content, list):
504
- for item in response.content:
505
- if hasattr(item, 'text') and item.text:
506
- try:
507
- lesson_data = json.loads(item.text)
508
- return lesson_data
509
- except Exception:
510
- return {"raw_pretty": json.dumps(item.text, indent=2)}
511
- if isinstance(response, dict):
512
- return response
513
- if isinstance(response, str):
514
- try:
515
- return json.loads(response)
516
- except Exception:
517
- return {"raw_pretty": json.dumps(response, indent=2)}
518
- return {"raw_pretty": json.dumps(str(response), indent=2)}
519
 
520
- gen_lesson_btn.click(
521
- fn=generate_lesson_async,
522
- inputs=[topic_input, grade_input, duration_input],
523
- outputs=[lesson_output]
524
- )
525
-
526
- gr.Markdown("## Learning Path Generation")
527
- with gr.Row():
528
- with gr.Column():
529
- lp_student_id = gr.Textbox(label="Student ID", value=student_id)
530
- lp_concept_ids = gr.Textbox(label="Concept IDs (comma-separated)", placeholder="e.g., python,functions,oop")
531
- lp_student_level = gr.Dropdown(choices=["beginner", "intermediate", "advanced"], value="beginner", label="Student Level")
532
- lp_btn = gr.Button("Generate Learning Path")
533
- with gr.Column():
534
- lp_output = gr.JSON(label="Learning Path")
535
- async def on_generate_learning_path(student_id, concept_ids, student_level):
536
- try:
537
- async with sse_client(SERVER_URL) as (sse, write):
538
- async with ClientSession(sse, write) as session:
539
- await session.initialize()
540
- result = await session.call_tool("get_learning_path", {
541
- "student_id": student_id,
542
- "concept_ids": [c.strip() for c in concept_ids.split(",") if c.strip()],
543
- "student_level": student_level
544
- })
545
- if hasattr(result, 'content') and isinstance(result.content, list):
546
- for item in result.content:
547
- if hasattr(item, 'text') and item.text:
548
- try:
549
- lp_data = json.loads(item.text)
550
- return lp_data
551
- except Exception:
552
- return {"raw_pretty": json.dumps(item.text, indent=2)}
553
- if isinstance(result, dict):
554
- return result
555
- if isinstance(result, str):
556
- try:
557
- return json.loads(result)
558
- except Exception:
559
- return {"raw_pretty": json.dumps(result, indent=2)}
560
- return {"raw_pretty": json.dumps(str(result), indent=2)}
561
- except Exception as e:
562
- return {"error": str(e)}
563
- lp_btn.click(
564
- fn=on_generate_learning_path,
565
- inputs=[lp_student_id, lp_concept_ids, lp_student_level],
566
- outputs=[lp_output]
567
- )
568
-
569
- # Tab 3: Multi-Modal Interaction
570
- with gr.Tab("Multi-Modal Interaction"):
571
- gr.Markdown("## Text Interaction")
572
 
573
  with gr.Row():
574
  with gr.Column():
575
- text_input = gr.Textbox(label="Ask a Question", value="How do I solve a quadratic equation?")
576
- text_btn = gr.Button("Submit")
577
-
 
578
  with gr.Column():
579
- text_output = gr.JSON(label="Response")
580
- async def text_interaction_async(text):
581
- async with sse_client(SERVER_URL) as (sse, write):
582
- async with ClientSession(sse, write) as session:
583
- await session.initialize()
584
- response = await session.call_tool("text_interaction", {"query": text, "student_id": student_id})
585
- if hasattr(response, 'content') and isinstance(response.content, list):
586
- for item in response.content:
587
- if hasattr(item, 'text') and item.text:
588
- try:
589
- data = json.loads(item.text)
590
- return data
591
- except Exception:
592
- return {"raw_pretty": json.dumps(item.text, indent=2)}
593
- if isinstance(response, dict):
594
- return response
595
- if isinstance(response, str):
596
- try:
597
- return json.loads(response)
598
- except Exception:
599
- return {"raw_pretty": json.dumps(response, indent=2)}
600
- return {"raw_pretty": json.dumps(str(response), indent=2)}
601
-
602
- text_btn.click(
603
- fn=text_interaction_async,
604
- inputs=[text_input],
605
- outputs=[text_output]
606
- )
607
 
608
- # Document OCR (PDF, images, etc.)
609
- gr.Markdown("## Document OCR & LLM Analysis")
610
- with gr.Row():
611
- with gr.Column():
612
- doc_input = gr.File(label="Upload PDF or Document", file_types=[".pdf", ".jpg", ".jpeg", ".png"])
613
- doc_ocr_btn = gr.Button("Extract Text & Analyze")
614
- with gr.Column():
615
- doc_output = gr.JSON(label="Document OCR & LLM Analysis")
616
- async def upload_file_to_storage(file_path):
617
- """Helper function to upload file to storage API"""
618
- try:
619
- url = "https://storage-bucket-api.vercel.app/upload"
620
- with open(file_path, 'rb') as f:
621
- files = {'file': (os.path.basename(file_path), f)}
622
- response = requests.post(url, files=files)
623
- response.raise_for_status()
624
- return response.json()
625
- except Exception as e:
626
- return {"error": f"Error uploading file to storage: {str(e)}", "success": False}
627
-
628
- async def document_ocr_async(file):
629
- if not file:
630
- return {"error": "No file provided", "success": False}
631
- try:
632
- if isinstance(file, dict):
633
- file_path = file.get("path", "")
634
- else:
635
- file_path = file
636
- if not file_path or not os.path.exists(file_path):
637
- return {"error": "File not found", "success": False}
638
- upload_result = await upload_file_to_storage(file_path)
639
- if not upload_result.get("success"):
640
- return upload_result
641
- storage_url = upload_result.get("storage_url")
642
- if not storage_url:
643
- return {"error": "No storage URL returned from upload", "success": False}
644
- async with sse_client(SERVER_URL) as (sse, write):
645
- async with ClientSession(sse, write) as session:
646
- await session.initialize()
647
- response = await session.call_tool("mistral_document_ocr", {"document_url": storage_url})
648
- if hasattr(response, 'content') and isinstance(response.content, list):
649
- for item in response.content:
650
- if hasattr(item, 'text') and item.text:
651
- try:
652
- data = json.loads(item.text)
653
- return data
654
- except Exception:
655
- return {"raw_pretty": json.dumps(item.text, indent=2)}
656
- if isinstance(response, dict):
657
- return response
658
- if isinstance(response, str):
659
- try:
660
- return json.loads(response)
661
- except Exception:
662
- return {"raw_pretty": json.dumps(response, indent=2)}
663
- return {"raw_pretty": json.dumps(str(response), indent=2)}
664
- except Exception as e:
665
- return {"error": f"Error processing document: {str(e)}", "success": False}
666
- doc_ocr_btn.click(
667
- fn=document_ocr_async,
668
- inputs=[doc_input],
669
- outputs=[doc_output]
670
- )
671
 
672
- # Tab 4: Analytics
673
- with gr.Tab("Analytics"):
674
- gr.Markdown("## Plagiarism Detection")
675
-
676
- with gr.Row():
677
- with gr.Column():
678
- submission_input = gr.Textbox(
679
- label="Student Submission",
680
- lines=5,
681
- value="The quadratic formula states that if ax² + bx + c = 0, then x = (-b ± √(b² - 4ac)) / 2a."
682
- )
683
- reference_input = gr.Textbox(
684
- label="Reference Source",
685
- lines=5,
686
- value="According to the quadratic formula, for any equation in the form ax² + bx + c = 0, the solutions are x = (-b ± √(b² - 4ac)) / 2a."
687
- )
688
- plagiarism_btn = gr.Button("Check Originality")
689
-
690
- with gr.Column():
691
- plagiarism_output = gr.JSON(label="Originality Report")
692
-
693
- async def check_plagiarism_async(submission, reference):
694
- async with sse_client(SERVER_URL) as (sse, write):
695
- async with ClientSession(sse, write) as session:
696
- await session.initialize()
697
- response = await session.call_tool("check_submission_originality", {"submission": submission, "reference_sources": [reference] if isinstance(reference, str) else reference})
698
- if hasattr(response, 'content') and isinstance(response.content, list):
699
- for item in response.content:
700
- if hasattr(item, 'text') and item.text:
701
- try:
702
- data = json.loads(item.text)
703
- return data
704
- except Exception:
705
- return {"raw_pretty": json.dumps(item.text, indent=2)}
706
- if isinstance(response, dict):
707
- return response
708
- if isinstance(response, str):
709
- try:
710
- return json.loads(response)
711
- except Exception:
712
- return {"raw_pretty": json.dumps(response, indent=2)}
713
- return {"raw_pretty": json.dumps(str(response), indent=2)}
714
-
715
- plagiarism_btn.click(
716
- fn=check_plagiarism_async,
717
- inputs=[submission_input, reference_input],
718
- outputs=[plagiarism_output]
719
- )
720
 
721
  # Launch the interface
722
  if __name__ == "__main__":
 
723
  demo.queue().launch(server_name="0.0.0.0", server_port=7860)
 
6
  import json
7
  import asyncio
8
  import gradio as gr
9
+ from typing import Optional, Dict, List, Tuple
10
  import requests
11
  import networkx as nx
12
  import matplotlib
13
  import matplotlib.pyplot as plt
 
14
  from datetime import datetime
15
 
16
  # Set matplotlib to use 'Agg' backend to avoid GUI issues in Gradio
 
19
  # Import MCP client components
20
  from mcp.client.sse import sse_client
21
  from mcp.client.session import ClientSession
 
22
 
23
  # Server configuration
24
  SERVER_URL = "https://tutorx-mcp.onrender.com/sse" # Ensure this is the SSE endpoint
 
44
  # Store the ping task reference
45
  ping_task = None
46
 
47
+ async def check_plagiarism_async(submission, reference):
48
+ """Check submission for plagiarism against reference sources"""
49
+ async with sse_client(SERVER_URL) as (sse, write):
50
+ async with ClientSession(sse, write) as session:
51
+ await session.initialize()
52
+ response = await session.call_tool(
53
+ "check_submission_originality",
54
+ {
55
+ "submission": submission,
56
+ "reference_sources": [reference] if isinstance(reference, str) else reference
57
+ }
58
+ )
59
+ if hasattr(response, 'content') and isinstance(response.content, list):
60
+ for item in response.content:
61
+ if hasattr(item, 'text') and item.text:
62
+ try:
63
+ data = json.loads(item.text)
64
+ return data
65
+ except Exception:
66
+ return {"raw_pretty": json.dumps(item.text, indent=2)}
67
+ if isinstance(response, dict):
68
+ return response
69
+ if isinstance(response, str):
70
+ try:
71
+ return json.loads(response)
72
+ except Exception:
73
+ return {"raw_pretty": json.dumps(response, indent=2)}
74
+ return {"raw_pretty": json.dumps(str(response), indent=2)}
75
+
76
  def start_ping_task():
77
  """Start the ping task when the Gradio app launches"""
78
  global ping_task
79
+ try:
80
+ if ping_task is None:
81
+ try:
82
+ loop = asyncio.get_event_loop()
83
+ except RuntimeError:
84
+ loop = asyncio.new_event_loop()
85
+ asyncio.set_event_loop(loop)
86
+
87
+ if loop.is_running():
88
+ ping_task = loop.create_task(start_periodic_ping())
89
+ print("Started periodic ping task")
90
+ else:
91
+ # If loop is not running, we'll start it in a separate thread
92
+ import threading
93
+ def start_loop():
94
+ asyncio.set_event_loop(loop)
95
+ loop.run_forever()
96
+
97
+ thread = threading.Thread(target=start_loop, daemon=True)
98
+ thread.start()
99
+ ping_task = asyncio.run_coroutine_threadsafe(start_periodic_ping(), loop)
100
+ print("Started periodic ping task in new thread")
101
+ except Exception as e:
102
+ print(f"Error starting ping task: {e}")
103
+
104
+ # Only run this code when the module is executed directly
105
+ if __name__ == "__main__" and not hasattr(gr, 'blocks'):
106
+ # This ensures we don't start the task when imported by Gradio
107
+ start_ping_task()
108
 
109
 
110
 
 
372
  except Exception as e:
373
  return None, {"error": str(e)}, []
374
 
375
+ # Define async functions outside the interface
376
+ async def on_generate_quiz(concept, difficulty):
377
+ try:
378
+ if not concept or not str(concept).strip():
379
+ return {"error": "Please enter a concept"}
380
+ try:
381
+ difficulty = int(float(difficulty))
382
+ difficulty = max(1, min(5, difficulty))
383
+ except (ValueError, TypeError):
384
+ difficulty = 3
385
+ if difficulty <= 2:
386
+ difficulty_str = "easy"
387
+ elif difficulty == 3:
388
+ difficulty_str = "medium"
389
+ else:
390
+ difficulty_str = "hard"
391
+ async with sse_client(SERVER_URL) as (sse, write):
392
+ async with ClientSession(sse, write) as session:
393
+ await session.initialize()
394
+ response = await session.call_tool("generate_quiz_tool", {"concept": concept.strip(), "difficulty": difficulty_str})
395
+ if hasattr(response, 'content') and isinstance(response.content, list):
396
+ for item in response.content:
397
+ if hasattr(item, 'text') and item.text:
398
+ try:
399
+ quiz_data = json.loads(item.text)
400
+ return quiz_data
401
+ except Exception:
402
+ return {"raw_pretty": json.dumps(item.text, indent=2)}
403
+ if isinstance(response, dict):
404
+ return response
405
+ if isinstance(response, str):
406
+ try:
407
+ return json.loads(response)
408
+ except Exception:
409
+ return {"raw_pretty": json.dumps(response, indent=2)}
410
+ return {"raw_pretty": json.dumps(str(response), indent=2)}
411
+ except Exception as e:
412
+ import traceback
413
+ return {
414
+ "error": f"Error generating quiz: {str(e)}\n\n{traceback.format_exc()}"
415
+ }
416
 
417
+ async def generate_lesson_async(topic, grade, duration):
418
+ async with sse_client(SERVER_URL) as (sse, write):
419
+ async with ClientSession(sse, write) as session:
420
+ await session.initialize()
421
+ response = await session.call_tool("generate_lesson_tool", {"topic": topic, "grade_level": grade, "duration_minutes": duration})
422
+ if hasattr(response, 'content') and isinstance(response.content, list):
423
+ for item in response.content:
424
+ if hasattr(item, 'text') and item.text:
425
+ try:
426
+ lesson_data = json.loads(item.text)
427
+ return lesson_data
428
+ except Exception:
429
+ return {"raw_pretty": json.dumps(item.text, indent=2)}
430
+ if isinstance(response, dict):
431
+ return response
432
+ if isinstance(response, str):
433
+ try:
434
+ return json.loads(response)
435
+ except Exception:
436
+ return {"raw_pretty": json.dumps(response, indent=2)}
437
+ return {"raw_pretty": json.dumps(str(response), indent=2)}
438
+
439
+ async def on_generate_learning_path(student_id, concept_ids, student_level):
440
+ try:
441
+ async with sse_client(SERVER_URL) as (sse, write):
442
+ async with ClientSession(sse, write) as session:
443
+ await session.initialize()
444
+ result = await session.call_tool("get_learning_path", {
445
+ "student_id": student_id,
446
+ "concept_ids": [c.strip() for c in concept_ids.split(",") if c.strip()],
447
+ "student_level": student_level
448
+ })
449
+ if hasattr(result, 'content') and isinstance(result.content, list):
450
+ for item in result.content:
451
+ if hasattr(item, 'text') and item.text:
452
+ try:
453
+ lp_data = json.loads(item.text)
454
+ return lp_data
455
+ except Exception:
456
+ return {"raw_pretty": json.dumps(item.text, indent=2)}
457
+ if isinstance(result, dict):
458
+ return result
459
+ if isinstance(result, str):
460
+ try:
461
+ return json.loads(result)
462
+ except Exception:
463
+ return {"raw_pretty": json.dumps(result, indent=2)}
464
+ return {"raw_pretty": json.dumps(str(result), indent=2)}
465
+ except Exception as e:
466
+ return {"error": str(e)}
467
+
468
+ async def text_interaction_async(text, student_id):
469
+ async with sse_client(SERVER_URL) as (sse, write):
470
+ async with ClientSession(sse, write) as session:
471
+ await session.initialize()
472
+ response = await session.call_tool("text_interaction", {"query": text, "student_id": student_id})
473
+ if hasattr(response, 'content') and isinstance(response.content, list):
474
+ for item in response.content:
475
+ if hasattr(item, 'text') and item.text:
476
+ try:
477
+ data = json.loads(item.text)
478
+ return data
479
+ except Exception:
480
+ return {"raw_pretty": json.dumps(item.text, indent=2)}
481
+ if isinstance(response, dict):
482
+ return response
483
+ if isinstance(response, str):
484
+ try:
485
+ return json.loads(response)
486
+ except Exception:
487
+ return {"raw_pretty": json.dumps(response, indent=2)}
488
+ return {"raw_pretty": json.dumps(str(response), indent=2)}
489
+
490
+ async def upload_file_to_storage(file_path):
491
+ """Helper function to upload file to storage API"""
492
+ try:
493
+ url = "https://storage-bucket-api.vercel.app/upload"
494
+ with open(file_path, 'rb') as f:
495
+ files = {'file': (os.path.basename(file_path), f)}
496
+ response = requests.post(url, files=files)
497
+ response.raise_for_status()
498
+ return response.json()
499
+ except Exception as e:
500
+ return {"error": f"Error uploading file to storage: {str(e)}", "success": False}
501
+
502
+ async def document_ocr_async(file):
503
+ if not file:
504
+ return {"error": "No file provided", "success": False}
505
+ try:
506
+ if isinstance(file, dict):
507
+ file_path = file.get("path", "")
508
+ else:
509
+ file_path = file
510
+ if not file_path or not os.path.exists(file_path):
511
+ return {"error": "File not found", "success": False}
512
+ upload_result = await upload_file_to_storage(file_path)
513
+ if not upload_result.get("success"):
514
+ return upload_result
515
+ storage_url = upload_result.get("storage_url")
516
+ if not storage_url:
517
+ return {"error": "No storage URL returned from upload", "success": False}
518
+ async with sse_client(SERVER_URL) as (sse, write):
519
+ async with ClientSession(sse, write) as session:
520
+ await session.initialize()
521
+ response = await session.call_tool("mistral_document_ocr", {"document_url": storage_url})
522
+ if hasattr(response, 'content') and isinstance(response.content, list):
523
+ for item in response.content:
524
+ if hasattr(item, 'text') and item.text:
525
+ try:
526
+ data = json.loads(item.text)
527
+ return data
528
+ except Exception:
529
+ return {"raw_pretty": json.dumps(item.text, indent=2)}
530
+ if isinstance(response, dict):
531
+ return response
532
+ if isinstance(response, str):
533
+ try:
534
+ return json.loads(response)
535
+ except Exception:
536
+ return {"raw_pretty": json.dumps(response, indent=2)}
537
+ return {"raw_pretty": json.dumps(str(response), indent=2)}
538
+ except Exception as e:
539
+ return {"error": f"Error processing document: {str(e)}", "success": False}
540
+
541
+ # Create Gradio interface
542
+ def create_gradio_interface():
543
  # Set a default student ID for the demo
544
  student_id = "student_12345"
545
+
546
+ with gr.Blocks(title="TutorX Educational AI", theme=gr.themes.Soft()) as demo:
547
+ # Start the ping task when the app loads
548
+ demo.load(
549
+ fn=start_ping_task,
550
+ inputs=None,
551
+ outputs=None,
552
+ queue=False
553
+ )
554
+
555
+ # Header Section
556
+ with gr.Row():
557
+ with gr.Column():
558
+ gr.Markdown("""
559
+ # 📚 TutorX Educational AI Platform
560
+ *An adaptive, multi-modal, and collaborative AI tutoring platform built with MCP.*
561
+ """)
562
+
563
+ # Add some spacing
564
+ gr.Markdown("---")
565
+
566
+ # Main Tabs with scrollable container
567
+ with gr.Tabs() as tabs:
568
+ # Tab 1: Core Features
569
+ with gr.Tab("1️⃣ Core Features", elem_id="core_features_tab"):
570
+ with gr.Row():
571
+ with gr.Column():
572
+ gr.Markdown("## 🔍 Concept Graph Visualization")
573
+ gr.Markdown("Explore relationships between educational concepts through an interactive graph visualization.")
574
+
575
  with gr.Row():
576
  # Left panel for controls and details
577
  with gr.Column(scale=3):
 
582
  value="machine_learning",
583
  scale=4
584
  )
585
+ load_btn = gr.Button("Load Graph", variant="primary", scale=1)
586
+
587
  # Concept details
588
  with gr.Accordion("Concept Details", open=True):
589
  concept_details = gr.JSON(
590
  label=None,
591
  show_label=False
592
  )
593
+
594
  # Related concepts and prerequisites
595
  with gr.Accordion("Related Concepts & Prerequisites", open=True):
596
  related_concepts = gr.Dataframe(
 
598
  datatype=["str", "str", "str"],
599
  interactive=False,
600
  wrap=True,
 
 
601
  )
602
+
603
+ # Graph visualization with a card-like container
604
  with gr.Column(scale=7):
605
+ with gr.Group():
606
+ graph_plot = gr.Plot(
607
+ label="Concept Graph",
608
+ show_label=True,
609
+ container=True
610
+ )
611
+
612
  # Event handlers
613
  load_btn.click(
614
  fn=sync_load_concept_graph,
615
  inputs=[concept_input],
616
  outputs=[graph_plot, concept_details, related_concepts]
617
  )
618
+
619
  # Load initial graph
620
  demo.load(
621
  fn=lambda: sync_load_concept_graph("machine_learning"),
622
  outputs=[graph_plot, concept_details, related_concepts]
623
  )
624
+
625
  # Help text and examples
626
  with gr.Row():
627
  gr.Markdown("""
 
631
  - `calculus`
632
  - `quantum_physics`
633
  """)
634
+
635
+ # Add some spacing between sections
636
+ gr.Markdown("---")
637
+
638
+ # Assessment Generation Section
639
+ with gr.Row():
640
+ with gr.Column():
641
+ gr.Markdown("## 📝 Assessment Generation")
642
+ gr.Markdown("Create customized quizzes and assessments based on educational concepts.")
643
+ gr.Markdown("---")
644
+
645
+ with gr.Row():
646
+ with gr.Column():
647
+ quiz_concept_input = gr.Textbox(
648
+ label="Enter Concept",
649
+ placeholder="e.g., Linear Equations, Photosynthesis, World War II",
650
+ lines=2
651
  )
652
+ with gr.Row():
653
+ diff_input = gr.Slider(
654
+ minimum=1,
655
+ maximum=5,
656
+ value=2,
657
+ step=1,
658
+ label="Difficulty Level",
659
+ interactive=True
660
+ )
661
+ gen_quiz_btn = gr.Button("Generate Quiz", variant="primary")
662
+
663
+ with gr.Column():
664
+ with gr.Group():
665
+ quiz_output = gr.JSON(label="Generated Quiz", show_label=True, container=True)
666
+
667
+ # Connect quiz generation button
668
+ gen_quiz_btn.click(
669
+ fn=on_generate_quiz,
670
+ inputs=[quiz_concept_input, diff_input],
671
+ outputs=[quiz_output],
672
+ api_name="generate_quiz"
673
+ )
674
 
675
+ # Tab 2: Advanced Features
676
+ with gr.Tab("2️⃣ Advanced Features", elem_id="advanced_features_tab"):
677
+ gr.Markdown("## Lesson Generation")
678
+
679
+ with gr.Row():
680
+ with gr.Column():
681
+ topic_input = gr.Textbox(label="Lesson Topic", value="Solving Quadratic Equations")
682
+ grade_input = gr.Slider(minimum=1, maximum=12, value=9, step=1, label="Grade Level")
683
+ duration_input = gr.Slider(minimum=15, maximum=90, value=45, step=5, label="Duration (minutes)")
684
+ gen_lesson_btn = gr.Button("Generate Lesson Plan")
685
+
686
+ with gr.Column():
687
+ lesson_output = gr.JSON(label="Lesson Plan")
688
+
689
+ # Connect lesson generation button
690
+ gen_lesson_btn.click(
691
+ fn=generate_lesson_async,
692
+ inputs=[topic_input, grade_input, duration_input],
693
+ outputs=[lesson_output]
694
+ )
695
+
696
+ gr.Markdown("## Learning Path Generation")
697
+ with gr.Row():
698
+ with gr.Column():
699
+ lp_student_id = gr.Textbox(label="Student ID", value=student_id)
700
+ lp_concept_ids = gr.Textbox(label="Concept IDs (comma-separated)", placeholder="e.g., python,functions,oop")
701
+ lp_student_level = gr.Dropdown(choices=["beginner", "intermediate", "advanced"], value="beginner", label="Student Level")
702
+ lp_btn = gr.Button("Generate Learning Path")
703
+ with gr.Column():
704
+ lp_output = gr.JSON(label="Learning Path")
705
+
706
+ # Connect learning path generation button
707
+ lp_btn.click(
708
+ fn=on_generate_learning_path,
709
+ inputs=[lp_student_id, lp_concept_ids, lp_student_level],
710
+ outputs=[lp_output]
711
+ )
712
 
713
+ # Tab 3: Interactive Tools
714
+ with gr.Tab("3️⃣ Interactive Tools", elem_id="interactive_tools_tab"):
715
+ gr.Markdown("## Text Interaction")
716
+
717
+ with gr.Row():
718
+ with gr.Column():
719
+ text_input = gr.Textbox(label="Ask a Question", value="How do I solve a quadratic equation?")
720
+ text_btn = gr.Button("Submit")
721
+
722
+ with gr.Column():
723
+ text_output = gr.JSON(label="Response")
724
+
725
+ # Connect text interaction button
726
+ text_btn.click(
727
+ fn=lambda text: text_interaction_async(text, student_id),
728
+ inputs=[text_input],
729
+ outputs=[text_output]
730
+ )
731
+
732
+ # Document OCR (PDF, images, etc.)
733
+ gr.Markdown("## Document OCR & LLM Analysis")
734
+ with gr.Row():
735
+ with gr.Column():
736
+ doc_input = gr.File(label="Upload PDF or Document", file_types=[".pdf", ".jpg", ".jpeg", ".png"])
737
+ doc_ocr_btn = gr.Button("Extract Text & Analyze")
738
+ with gr.Column():
739
+ doc_output = gr.JSON(label="Document OCR & LLM Analysis")
740
+
741
+ # Connect document OCR button
742
+ doc_ocr_btn.click(
743
+ fn=document_ocr_async,
744
+ inputs=[doc_input],
745
+ outputs=[doc_output]
746
+ )
747
 
748
+ # Tab 4: Data Analytics
749
+ with gr.Tab("4️⃣ Data Analytics", elem_id="data_analytics_tab"):
750
+ gr.Markdown("## Plagiarism Detection")
751
 
752
+ with gr.Row():
753
+ with gr.Column():
754
+ submission_input = gr.Textbox(
755
+ label="Student Submission",
756
+ lines=5,
757
+ value="The quadratic formula states that if ax² + bx + c = 0, then x = (-b ± √(b² - 4ac)) / 2a."
758
+ )
759
+ reference_input = gr.Textbox(
760
+ label="Reference Source",
761
+ lines=5,
762
+ value="According to the quadratic formula, for any equation in the form ax² + bx + c = 0, the solutions are x = (-b ± √(- 4ac)) / 2a."
763
+ )
764
+ plagiarism_btn = gr.Button("Check Originality")
765
+
766
+ with gr.Column():
767
+ with gr.Group():
768
+ gr.Markdown("### 🔍 Originality Report")
769
+ plagiarism_output = gr.JSON(label="", show_label=False, container=False)
770
+
771
+ # Connect the button to the plagiarism check function
772
+ plagiarism_btn.click(
773
+ fn=check_plagiarism_async,
774
+ inputs=[submission_input, reference_input],
775
+ outputs=[plagiarism_output]
776
777
 
778
+ # Footer
779
+ gr.Markdown("---")
780
  with gr.Row():
781
  with gr.Column():
782
+ gr.Markdown("### About TutorX")
783
+ gr.Markdown("""
784
+ TutorX is an AI-powered educational platform designed to enhance learning through interactive tools and personalized content.
785
+ """)
786
  with gr.Column():
787
+ gr.Markdown("### Quick Links")
788
+ gr.Markdown("""
789
+ - [Documentation](#)
790
+ - [GitHub Repository](#)
791
+ - [Report an Issue](#)
792
+ """)
793
 
794
+ # Add some spacing at the bottom
795
+ gr.Markdown("\n\n")
796
+ gr.Markdown("---")
797
+ gr.Markdown("© 2025 TutorX - All rights reserved")
798
 
799
+ return demo
800
 
801
  # Launch the interface
802
  if __name__ == "__main__":
803
+ demo = create_gradio_interface()
804
  demo.queue().launch(server_name="0.0.0.0", server_port=7860)
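
One subtlety in the rewritten `start_ping_task`: if no usable event loop is running, it creates a fresh loop, runs it on a daemon thread, and schedules the ping coroutine with `asyncio.run_coroutine_threadsafe`, which returns a `concurrent.futures.Future` rather than an `asyncio.Task`. A self-contained sketch of that fallback path (the names `periodic_ping` and `start_background_ping` are illustrative, not from this commit):

```python
import asyncio
import concurrent.futures
import threading

async def periodic_ping(interval_s: float = 600.0) -> None:
    """Illustrative keep-alive loop; the real app pings the MCP server."""
    while True:
        print("ping")
        await asyncio.sleep(interval_s)

def start_background_ping() -> concurrent.futures.Future:
    # Dedicated event loop on a daemon thread, mirroring the fallback branch
    # of start_ping_task; the returned Future can be cancelled to stop the pings.
    loop = asyncio.new_event_loop()
    threading.Thread(target=loop.run_forever, daemon=True).start()
    return asyncio.run_coroutine_threadsafe(periodic_ping(), loop)
```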
run.py CHANGED
@@ -79,10 +79,11 @@ def run_gradio_interface(port=7860):
79
  if mcp_server_dir not in sys.path:
80
  sys.path.insert(0, mcp_server_dir)
81
 
82
- # Import and run the Gradio app
83
- from app import demo
84
 
85
- # Launch the Gradio interface
 
86
  demo.launch(
87
  server_name="0.0.0.0",
88
  server_port=port,
 
79
  if mcp_server_dir not in sys.path:
80
  sys.path.insert(0, mcp_server_dir)
81
 
82
+ # Import and create the Gradio app
83
+ from app import create_gradio_interface
84
 
85
+ # Create and launch the Gradio interface
86
+ demo = create_gradio_interface()
87
  demo.launch(
88
  server_name="0.0.0.0",
89
  server_port=port,
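
Nearly every tool wrapper moved to module level in this commit repeats the same response handling: walk `response.content`, try `json.loads` on each text item, and fall back to a pretty-printed raw payload. A hedged sketch of that shared step as a standalone helper; `parse_tool_response` is a name invented here for illustration, not something the commit adds:

```python
import json
from typing import Any

def parse_tool_response(response: Any) -> dict:
    """Best-effort conversion of an MCP call_tool result into a dict."""
    content = getattr(response, "content", None)
    if isinstance(content, list):
        for item in content:
            text = getattr(item, "text", None)
            if text:
                try:
                    return json.loads(text)
                except Exception:
                    return {"raw_pretty": json.dumps(text, indent=2)}
    if isinstance(response, dict):
        return response
    if isinstance(response, str):
        try:
            return json.loads(response)
        except Exception:
            return {"raw_pretty": json.dumps(response, indent=2)}
    return {"raw_pretty": json.dumps(str(response), indent=2)}
```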