File size: 2,710 Bytes
a14fe7d
473cdbb
a14fe7d
4febf46
473cdbb
a14fe7d
7b850de
4febf46
a14fe7d
7b850de
473cdbb
4febf46
7b850de
 
720c014
7b850de
473cdbb
4febf46
a14fe7d
4febf46
7b850de
4febf46
 
 
 
 
 
720c014
4febf46
 
 
 
 
 
7b850de
720c014
4febf46
7b850de
4febf46
720c014
7b850de
a14fe7d
7b850de
4febf46
7b850de
 
a14fe7d
4febf46
a14fe7d
4febf46
 
 
7b850de
4febf46
7b850de
4febf46
 
 
7b850de
 
4febf46
7b850de
a14fe7d
4febf46
 
 
7b850de
a14fe7d
7b850de
4febf46
 
 
720c014
4febf46
a14fe7d
4febf46
a14fe7d
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
import gradio as gr
from huggingface_hub import InferenceClient

# Initialize the Hugging Face Inference Client.
# NOTE(review): no model or token is passed here, so this relies on ambient
# credentials (e.g. the HF_TOKEN environment variable) — confirm deployment env.
client = InferenceClient()

# Function to generate the math lesson with LaTeX formatting
def generate_stream(selected_topic, input_text):
    """
    Stream a math lesson on solving systems of equations, rendered in LaTeX.

    Parameters
    ----------
    selected_topic : str
        Topic chosen in the UI. Currently unused (only "Math" exists), but
        kept so the Gradio `inputs=[selected_topic, input_text]` wiring and
        any other callers remain compatible.
    input_text : str
        The system of equations to solve. When blank/whitespace, a generic
        beginner-level lesson is requested instead.

    Yields
    ------
    str
        The lesson text accumulated so far (each yield is an incremental
        update for the streaming UI), or a single "Error: ..." message if
        generation fails.
    """
    if input_text.strip():
        prompt = (
            "Create a detailed lesson on solving the following system of "
            f"equations using the elimination method: {input_text}. "
            "Include step-by-step explanations and render all equations in LaTeX format."
        )
    else:
        # No f-prefix needed: this literal has no placeholders.
        prompt = "Create a beginner-level lesson for solving systems of equations with examples in LaTeX format."
    messages = [{"role": "user", "content": prompt}]

    try:
        # Request a streamed chat completion from Hugging Face.
        stream = client.chat.completions.create(
            model="Qwen/Qwen2.5-Coder-32B-Instruct",  # Streaming model
            messages=messages,
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7,
            stream=True,
        )

        # Re-yield the growing text after every chunk so the UI updates live.
        generated_content = ""
        for chunk in stream:
            delta = chunk.choices[0].delta.content
            # The terminal chunk of an OpenAI-style stream carries
            # delta.content=None; concatenating it would raise TypeError
            # mid-stream (surfacing as a bogus "Error: ..." in the UI),
            # so skip empty/None deltas.
            if delta:
                generated_content += delta
                yield generated_content  # Incremental updates

    except Exception as e:
        # Surface any failure (network, auth, model error) in the UI
        # instead of crashing the Gradio callback.
        yield f"Error: {e}"


# Create the Gradio app interface.
# Layout: a header row of markdown, then one row split into an input column
# (topic radio, equations textbox, button) and an output column (markdown).
with gr.Blocks() as app:
    gr.Markdown("## πŸŽ“ Solve Systems of Linear Equations with LaTeX")
    gr.Markdown(
        "Generate dynamic lessons on solving systems of equations step-by-step. "
        "The content includes explanations and LaTeX-rendered equations for better understanding!"
    )

    with gr.Row():
        # Input Section
        with gr.Column():
            # Single-choice radio; only one topic exists today, so it is
            # preselected and effectively informational.
            selected_topic = gr.Radio(
                choices=["Math"],
                label="Select a Topic",
                value="Math"  # Only math is needed here
            )
            input_text = gr.Textbox(
                lines=2,
                label="Input Equations",
                placeholder="Enter the system of equations (e.g., '4x + 3y = 56, 7x + 6y = 54')"
            )
            generate_button = gr.Button("Generate Lesson")

        # Output Section
        with gr.Column():
            gr.Markdown("### Generated Content")
            output_stream = gr.Markdown()  # Output content as Markdown for LaTeX rendering

    # Link the generate button to the function.
    # generate_stream is a generator, so Gradio streams each yielded string
    # into output_stream as an incremental update.
    generate_button.click(
        fn=generate_stream,
        inputs=[selected_topic, input_text],
        outputs=output_stream,
    )

# Launch the Gradio app at module import time (standard for HF Spaces).
app.launch()