import os  # For environment variables
import time  # To simulate processing time if needed

import gradio as gr
from huggingface_hub import InferenceClient

# Initialize the Hugging Face Inference Client
# (uses the HF_TOKEN environment variable or a cached login token, if available)
client = InferenceClient()



def preprocess_latex(content):
    """Heuristically wrap math-looking lines so gr.Markdown renders them as LaTeX."""
    # Split content into lines for better formatting
    lines = content.split("\n")
    formatted_lines = []
    
    for line in lines:
        # If a line contains equations or math expressions, wrap them properly
        if "Simplify" in line or "Solve" in line or "boxed" in line or "frac" in line:
            formatted_lines.append(f"$$ {line.strip()} $$")  # Block math
        elif "(" in line and ")" in line:  # Inline math for variables
            formatted_lines.append(line.replace("(", "$").replace(")", "$"))
        else:
            formatted_lines.append(line)  # Plain text

    # Join lines back into a single string
    return "\n".join(formatted_lines)

# Function to generate and format AI response
def generate_response(prompt_template, **kwargs):
    # Simulate processing/loading
    time.sleep(1)  # Optional: remove or adjust based on actual execution time
    # Look up the prompt template from the environment (e.g. a Space secret)
    template = os.getenv(prompt_template)
    if template is None:
        return gr.update(value=f"Error: prompt template '{prompt_template}' is not set in the environment.")
    prompt = template.format(**kwargs)
    response = client.chat.completions.create(
        model="Qwen/Qwen2.5-Math-1.5B-Instruct",
        messages=[{"role": "user", "content": prompt}],
        temperature=0.7,
        max_tokens=1024,
        top_p=0.8
    )
    response_content = response.choices[0].message.content
    formatted_response = preprocess_latex(response_content)
    return gr.update(value=formatted_response)
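
# NOTE: the PROMPT_* templates (PROMPT_SOLVE, PROMPT_HINT, PROMPT_VERIFY,
# PROMPT_GENERATE, PROMPT_EXPLAIN) are read from environment variables /
# Space secrets and are not defined in this file. A hypothetical template
# could look like:
#   PROMPT_SOLVE="Solve this {difficulty}-level problem step by step: {problem}"
# The placeholder names only need to match the input keys used in the tabs
# below ({problem}, {difficulty}, {solution}, {topic}).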

# Custom CSS for the LaTeX output area (passed to gr.Blocks so it is applied at launch)
latex_css = """
#latex-output {
    font-family: "Computer Modern", serif;
    font-size: 16px;
    line-height: 1.5;
}
"""

# Gradio app interface
with gr.Blocks(css=latex_css) as app:
    gr.Markdown("## Mathematical Insight Tutor")
    gr.Markdown("An advanced AI-powered tutor to help you master math concepts with step-by-step explanations.")

    def create_tab(tab_name, prompt_template, inputs):
        with gr.Tab(tab_name):
            input_fields = []
            for inp in inputs:
                if inp["type"] == "textbox":
                    input_fields.append(
                        gr.Textbox(lines=inp.get("lines", 1), label=inp["label"], placeholder=inp["placeholder"])
                    )
                elif inp["type"] == "dropdown":
                    input_fields.append(
                        gr.Dropdown(choices=inp["choices"], label=inp["label"])
                    )
                elif inp["type"] == "value":
                    input_fields.append(
                        gr.Textbox(label=inp["label"], placeholder=inp["placeholder"])
                    )
            # Button and output
            button = gr.Button(f"{tab_name} Execute")
            output = gr.Markdown(label="Output", elem_id="latex-output")
            # Link the button to generate_response, zipping the positional
            # component values onto the keys declared in the input specs
            button.click(
                fn=lambda *args: generate_response(prompt_template, **dict(zip([inp["key"] for inp in inputs], args))),
                inputs=input_fields,
                outputs=output,
                api_name=f"/{tab_name.lower().replace(' ', '_')}_execute"
            )

    # Tabs for functionalities
    create_tab(
        "Solve a Problem",
        "PROMPT_SOLVE",
        [
            {"key": "problem", "type": "textbox", "label": "Enter Math Problem", "placeholder": "e.g., Solve for x: 2x + 5 = 15"},
            {"key": "difficulty", "type": "dropdown", "label": "Difficulty Level", "choices": ["Beginner", "Intermediate", "Advanced"]}
        ]
    )

    create_tab(
        "Generate a Hint",
        "PROMPT_HINT",
        [
            {"key": "problem", "type": "textbox", "label": "Enter Math Problem for Hint", "placeholder": "e.g., Solve for x: 2x + 5 = 15"},
            {"key": "difficulty", "type": "dropdown", "label": "Difficulty Level", "choices": ["Beginner", "Intermediate", "Advanced"]}
        ]
    )

    create_tab(
        "Verify Solution",
        "PROMPT_VERIFY",
        [
            {"key": "problem", "type": "textbox", "label": "Enter Math Problem", "placeholder": "e.g., Solve for x: 2x + 5 = 15"},
            {"key": "solution", "type": "value", "label": "Enter Your Solution", "placeholder": "e.g., x = 5"}
        ]
    )

    create_tab(
        "Generate Practice Question",
        "PROMPT_GENERATE",
        [
            {"key": "topic", "type": "textbox", "label": "Enter Math Topic", "placeholder": "e.g., Algebra, Calculus"},
            {"key": "difficulty", "type": "dropdown", "label": "Difficulty Level", "choices": ["Beginner", "Intermediate", "Advanced"]}
        ]
    )

    create_tab(
        "Explain Concept",
        "PROMPT_EXPLAIN",
        [
            {"key": "problem", "type": "textbox", "label": "Enter Math Problem", "placeholder": "e.g., Solve for x: 2x + 5 = 15"},
            {"key": "difficulty", "type": "dropdown", "label": "Difficulty Level", "choices": ["Beginner", "Intermediate", "Advanced"]}
        ]
    )
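
    # Illustrative programmatic call via gradio_client (assumption: the app is
    # running locally on the default port and gradio_client is installed):
    #   from gradio_client import Client
    #   Client("http://127.0.0.1:7860").predict(
    #       "Solve for x: 2x + 5 = 15", "Beginner",
    #       api_name="/solve_a_problem_execute",
    #   )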

# Launch the app
app.launch(debug=True)
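
# To run locally (assumed setup; adjust names to your environment): install the
# dependencies and export the token plus the five PROMPT_* templates before
# starting the script, e.g.
#   pip install gradio huggingface_hub
#   export HF_TOKEN=...        # Hugging Face access token
#   export PROMPT_SOLVE=... PROMPT_HINT=... PROMPT_VERIFY=... \
#          PROMPT_GENERATE=... PROMPT_EXPLAIN=...
#   python app.py              # "app.py" is an assumed filename for this script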