mgbam committed on
Commit
65151e2
·
verified ·
1 Parent(s): 7b850de

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -24
app.py CHANGED
@@ -4,73 +4,83 @@ from huggingface_hub import InferenceClient
4
  # Initialize the Hugging Face Inference Client
5
  client = InferenceClient()
6
 
7
# Function to generate the math lesson with LaTeX formatting
def generate_stream(selected_topic, input_text):
    """
    Generate a dynamic math lesson with LaTeX-rendered equations.

    Args:
        selected_topic (str): The topic chosen in the UI. Unused here
            (only "Math" is offered); kept so the Gradio ``inputs`` wiring
            still matches this signature.
        input_text (str): The system of equations to solve, or blank to
            request a beginner-level lesson instead.

    Yields:
        str: The lesson content generated so far (grows with each chunk,
            so the UI can render it incrementally).
    """
    prompt = (
        f"Create a detailed lesson on solving the following system of equations using the elimination method: {input_text}. "
        "Include step-by-step explanations and render all equations in LaTeX format."
        if input_text.strip()
        # No f-prefix needed: this string has no placeholders.
        else "Create a beginner-level lesson for solving systems of equations with examples in LaTeX format."
    )
    messages = [{"role": "user", "content": prompt}]

    try:
        # Generate the content using Hugging Face (streaming response)
        stream = client.chat.completions.create(
            model="Qwen/Qwen2.5-Coder-32B-Instruct",  # Streaming model
            messages=messages,
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7,
            stream=True,
        )

        # Stream the generated content incrementally
        generated_content = ""
        for chunk in stream:
            # Bug fix: the final streamed chunk's delta may carry no
            # content (None); concatenating it raised TypeError before.
            delta = chunk.choices[0].delta.content
            if delta:
                generated_content += delta
                yield generated_content  # Incremental updates
    except Exception as e:
        yield f"Error: {e}"  # Surface the failure in the UI instead of crashing
39
 
40
# Build the Gradio application layout.
with gr.Blocks() as app:
    # Page heading and a short description of what the tool produces.
    gr.Markdown("## 🎓 Solve Systems of Linear Equations with LaTeX")
    gr.Markdown(
        "Generate dynamic lessons on solving systems of equations step-by-step. "
        "The content includes explanations and LaTeX-rendered equations for better understanding!"
    )

    with gr.Row():
        # Left column: user inputs.
        with gr.Column():
            selected_topic = gr.Radio(
                choices=["Math"],
                value="Math",  # single fixed choice for this app
                label="Select a Topic",
            )
            input_text = gr.Textbox(
                label="Input Equations",
                placeholder="Enter the system of equations (e.g., '4x + 3y = 56, 7x + 6y = 54')",
                lines=2,
            )
            generate_button = gr.Button("Generate Lesson")

        # Right column: streamed result, rendered as Markdown so the
        # LaTeX in the model output displays properly.
        with gr.Column():
            gr.Markdown("### Generated Content")
            output_stream = gr.Markdown()

    # Wire the button to the streaming generator.
    generate_button.click(
        fn=generate_stream,
        inputs=[selected_topic, input_text],
        outputs=output_stream,
    )
75
 
76
  # Launch the Gradio app
 
4
  # Initialize the Hugging Face Inference Client
5
  client = InferenceClient()
6
 
7
# Function to stream content for Math, STEM, and Code Generation
def generate_stream(selected_topic, input_text):
    """
    Generate dynamic lessons, solutions, or code snippets for a topic.

    Args:
        selected_topic (str): The selected subject (e.g., "Math", "STEM",
            or "Computer Science (Code Generation)").
        input_text (str): Optional extra context for content generation;
            blank input falls back to a beginner-level lesson.

    Yields:
        str: The content generated so far (grows with each streamed chunk,
            so the UI can render it incrementally).
    """
    # Create a topic-specific prompt
    prompt = (
        f"Generate a {selected_topic.lower()} lesson, problem, or example based on the following input: {input_text}"
        if input_text.strip() else
        f"Generate a beginner-level {selected_topic.lower()} lesson with examples."
    )
    messages = [{"role": "user", "content": prompt}]

    try:
        # Create a stream for generating content
        stream = client.chat.completions.create(
            model="Qwen/Qwen2.5-Coder-32B-Instruct",  # Streaming model
            messages=messages,
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7,
            stream=True,
        )

        # Stream the generated content incrementally
        generated_content = ""
        for chunk in stream:
            # Bug fix: the final streamed chunk's delta may carry no
            # content (None); concatenating it raised TypeError before.
            delta = chunk.choices[0].delta.content
            if delta:
                generated_content += delta
                yield generated_content  # Yield content incrementally
    except Exception as e:
        yield f"Error: {e}"  # Display error if any issues occur
45
 
46
# Assemble the Gradio interface.
with gr.Blocks() as app:
    # Title and usage instructions shown at the top of the page.
    gr.Markdown("## 🎓 STEM Learning and Code Generator")
    gr.Markdown(
        "Get dynamic lessons, problem-solving examples, or code snippets for Math, STEM, "
        "or Computer Science. Select a topic and get started!"
    )

    with gr.Row():
        # Left column: topic picker, optional context, and the trigger button.
        with gr.Column():
            topic_choice = gr.Radio(
                choices=["Math", "STEM", "Computer Science (Code Generation)"],
                value="Math",  # default selection
                label="Select a Topic",
            )
            context_input = gr.Textbox(
                label="Optional Input",
                placeholder="Provide additional context (e.g., 'Explain calculus basics' or 'Generate Python code for sorting').",
                lines=2,
            )
            run_button = gr.Button("Generate Content")

        # Right column: read-only text area that receives the streamed output.
        with gr.Column():
            gr.Markdown("### Generated Content")
            result_area = gr.TextArea(
                label="Output",
                placeholder="Generated content will appear here...",
                interactive=False,
            )

    # Hook the button up to the streaming generator function.
    run_button.click(
        fn=generate_stream,
        inputs=[topic_choice, context_input],
        outputs=result_area,
    )
85
 
86
  # Launch the Gradio app