Update app.py
app.py CHANGED
@@ -4,28 +4,21 @@ from huggingface_hub import InferenceClient
 # Initialize the Hugging Face Inference Client
 client = InferenceClient()

-# Function to
+# Function to generate the math lesson with LaTeX formatting
 def generate_stream(selected_topic, input_text):
     """
-    Generates dynamic
-
-    Args:
-        selected_topic (str): The selected subject (e.g., Math, STEM, or Code Generation).
-        input_text (str): Additional input for contextual content generation.
-
-    Yields:
-        str: Incremental content, including Markdown/LaTeX math formatting.
+    Generates a dynamic math lesson with LaTeX-rendered equations and explanations.
     """
-    # Create a topic-specific prompt
     prompt = (
-        f"
+        f"Create a detailed lesson on solving the following system of equations using the elimination method: {input_text}. "
+        "Include step-by-step explanations and render all equations in LaTeX format."
         if input_text.strip()
-        else f"
+        else f"Create a beginner-level lesson for solving systems of equations with examples in LaTeX format."
     )
     messages = [{"role": "user", "content": prompt}]

     try:
-        #
+        # Generate the content using Hugging Face
         stream = client.chat.completions.create(
             model="Qwen/Qwen2.5-Coder-32B-Instruct",  # Streaming model
             messages=messages,
@@ -39,42 +32,41 @@ def generate_stream(selected_topic, input_text):
         generated_content = ""
         for chunk in stream:
             generated_content += chunk.choices[0].delta.content
-            yield generated_content  #
+            yield generated_content  # Incremental updates

     except Exception as e:
-        yield f"Error: {e}"  #
+        yield f"Error: {e}"  # Handle exceptions gracefully


-# Create the Gradio interface
+# Create the Gradio app interface
 with gr.Blocks() as app:
-
-    gr.Markdown("## π STEM Learning and Code Generator with LaTeX")
+    gr.Markdown("## π Solve Systems of Linear Equations with LaTeX")
     gr.Markdown(
-        "
-        "
+        "Generate dynamic lessons on solving systems of equations step-by-step. "
+        "The content includes explanations and LaTeX-rendered equations for better understanding!"
     )

     with gr.Row():
         # Input Section
         with gr.Column():
             selected_topic = gr.Radio(
-                choices=["Math"
+                choices=["Math"],
                 label="Select a Topic",
-                value="Math"
+                value="Math"  # Only math is needed here
             )
             input_text = gr.Textbox(
                 lines=2,
-                label="
-                placeholder="
+                label="Input Equations",
+                placeholder="Enter the system of equations (e.g., '4x + 3y = 56, 7x + 6y = 54')"
             )
-            generate_button = gr.Button("Generate
+            generate_button = gr.Button("Generate Lesson")

         # Output Section
         with gr.Column():
             gr.Markdown("### Generated Content")
-            output_stream = gr.Markdown()  #
+            output_stream = gr.Markdown()  # Output content as Markdown for LaTeX rendering

-    # Link the generate button to the
+    # Link the generate button to the function
     generate_button.click(
         fn=generate_stream,
         inputs=[selected_topic, input_text],
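
The diff view is cut off right after the `inputs=[selected_topic, input_text],` argument, so the `outputs=` wiring and the launch call are not visible in this commit. The snippet below is a minimal, self-contained sketch of the streaming pattern the app relies on (a generator handler whose successive yields re-render a `gr.Markdown` output); the handler body, component names, and the `outputs=`/`launch()` wiring are illustrative assumptions, not part of the shown diff.

```python
# Minimal sketch of Gradio's streaming pattern (assumed wiring; the commit's
# diff is truncated before the outputs= argument and the launch call).
import time

import gradio as gr


def stream_demo(equations: str):
    # A generator handler: each `yield` replaces the Markdown output,
    # so the lesson appears to stream in as it is produced.
    text = ""
    for piece in (
        "### Lesson\n",
        f"Solve the system: {equations}\n\n",
        "Step 1: eliminate one variable, then back-substitute...",
    ):
        text += piece
        time.sleep(0.2)  # stand-in for waiting on model chunks
        yield text


with gr.Blocks() as demo:
    eqs = gr.Textbox(label="Input Equations", value="4x + 3y = 56, 7x + 6y = 54")
    out = gr.Markdown()
    gr.Button("Generate Lesson").click(fn=stream_demo, inputs=eqs, outputs=out)

if __name__ == "__main__":
    demo.launch()
```

In the committed app, `generate_stream` plays the role of `stream_demo`: the incremental text comes from the `chunk.choices[0].delta.content` pieces of the chat-completion stream (presumably requested with `stream=True` in the arguments elided from this hunk) rather than from a fixed list.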
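
For reference (not part of the commit), this is the elimination-method arithmetic for the placeholder system `4x + 3y = 56, 7x + 6y = 54`, i.e. the kind of LaTeX-rendered working the prompt asks the model to produce:

```latex
\begin{aligned}
4x + 3y &= 56 \qquad\text{(multiply by 2)}\;\Rightarrow\; 8x + 6y = 112\\
7x + 6y &= 54\\
(8x + 6y) - (7x + 6y) &= 112 - 54 \;\Rightarrow\; x = 58\\
4(58) + 3y &= 56 \;\Rightarrow\; 3y = -176 \;\Rightarrow\; y = -\tfrac{176}{3}
\end{aligned}
```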