mgbam commited on
Commit
7b850de
·
verified ·
1 Parent(s): 7fc3220

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +19 -27
app.py CHANGED
@@ -4,28 +4,21 @@ from huggingface_hub import InferenceClient
4
  # Initialize the Hugging Face Inference Client
5
  client = InferenceClient()
6
 
7
- # Function to stream content with Math, STEM, or Code Generation, including LaTeX
8
  def generate_stream(selected_topic, input_text):
9
  """
10
- Generates dynamic lessons, solutions, or code snippets with LaTeX-style formatting.
11
-
12
- Args:
13
- selected_topic (str): The selected subject (e.g., Math, STEM, or Code Generation).
14
- input_text (str): Additional input for contextual content generation.
15
-
16
- Yields:
17
- str: Incremental content, including Markdown/LaTeX math formatting.
18
  """
19
- # Create a topic-specific prompt
20
  prompt = (
21
- f"Generate a {selected_topic.lower()} lesson, problem, or example with step-by-step explanations and LaTeX math formatting based on the following input: {input_text}"
 
22
  if input_text.strip()
23
- else f"Generate a beginner-level {selected_topic.lower()} lesson with examples and LaTeX math formatting."
24
  )
25
  messages = [{"role": "user", "content": prompt}]
26
 
27
  try:
28
- # Create a stream for generating content
29
  stream = client.chat.completions.create(
30
  model="Qwen/Qwen2.5-Coder-32B-Instruct", # Streaming model
31
  messages=messages,
@@ -39,42 +32,41 @@ def generate_stream(selected_topic, input_text):
39
  generated_content = ""
40
  for chunk in stream:
41
  generated_content += chunk.choices[0].delta.content
42
- yield generated_content # Yield content incrementally
43
 
44
  except Exception as e:
45
- yield f"Error: {e}" # Display error if any issues occur
46
 
47
 
48
- # Create the Gradio interface
49
  with gr.Blocks() as app:
50
- # App Title and Instructions
51
- gr.Markdown("## 🎓 STEM Learning and Code Generator with LaTeX")
52
  gr.Markdown(
53
- "Get dynamic lessons, problem-solving examples, or code snippets for Math, STEM, "
54
- "or Computer Science. Includes LaTeX support for equations and step-by-step breakdowns!"
55
  )
56
 
57
  with gr.Row():
58
  # Input Section
59
  with gr.Column():
60
  selected_topic = gr.Radio(
61
- choices=["Math", "STEM", "Computer Science (Code Generation)"],
62
  label="Select a Topic",
63
- value="Math", # Default selection
64
  )
65
  input_text = gr.Textbox(
66
  lines=2,
67
- label="Optional Input",
68
- placeholder="Provide additional context (e.g., 'Explain calculus basics' or 'Generate Python code for sorting').",
69
  )
70
- generate_button = gr.Button("Generate Content")
71
 
72
  # Output Section
73
  with gr.Column():
74
  gr.Markdown("### Generated Content")
75
- output_stream = gr.Markdown() # Removed placeholder argument
76
 
77
- # Link the generate button to the streaming function
78
  generate_button.click(
79
  fn=generate_stream,
80
  inputs=[selected_topic, input_text],
 
4
  # Initialize the Hugging Face Inference Client
5
  client = InferenceClient()
6
 
7
+ # Function to generate the math lesson with LaTeX formatting
8
  def generate_stream(selected_topic, input_text):
9
  """
10
+ Generates a dynamic math lesson with LaTeX-rendered equations and explanations.
 
 
 
 
 
 
 
11
  """
 
12
  prompt = (
13
+ f"Create a detailed lesson on solving the following system of equations using the elimination method: {input_text}. "
14
+ "Include step-by-step explanations and render all equations in LaTeX format."
15
  if input_text.strip()
16
+ else f"Create a beginner-level lesson for solving systems of equations with examples in LaTeX format."
17
  )
18
  messages = [{"role": "user", "content": prompt}]
19
 
20
  try:
21
+ # Generate the content using Hugging Face
22
  stream = client.chat.completions.create(
23
  model="Qwen/Qwen2.5-Coder-32B-Instruct", # Streaming model
24
  messages=messages,
 
32
  generated_content = ""
33
  for chunk in stream:
34
  generated_content += chunk.choices[0].delta.content
35
+ yield generated_content # Incremental updates
36
 
37
  except Exception as e:
38
+ yield f"Error: {e}" # Handle exceptions gracefully
39
 
40
 
41
+ # Create the Gradio app interface
42
  with gr.Blocks() as app:
43
+ gr.Markdown("## 🎓 Solve Systems of Linear Equations with LaTeX")
 
44
  gr.Markdown(
45
+ "Generate dynamic lessons on solving systems of equations step-by-step. "
46
+ "The content includes explanations and LaTeX-rendered equations for better understanding!"
47
  )
48
 
49
  with gr.Row():
50
  # Input Section
51
  with gr.Column():
52
  selected_topic = gr.Radio(
53
+ choices=["Math"],
54
  label="Select a Topic",
55
+ value="Math" # Only math is needed here
56
  )
57
  input_text = gr.Textbox(
58
  lines=2,
59
+ label="Input Equations",
60
+ placeholder="Enter the system of equations (e.g., '4x + 3y = 56, 7x + 6y = 54')"
61
  )
62
+ generate_button = gr.Button("Generate Lesson")
63
 
64
  # Output Section
65
  with gr.Column():
66
  gr.Markdown("### Generated Content")
67
+ output_stream = gr.Markdown() # Output content as Markdown for LaTeX rendering
68
 
69
+ # Link the generate button to the function
70
  generate_button.click(
71
  fn=generate_stream,
72
  inputs=[selected_topic, input_text],