mgbam committed on
Commit 720c014 · verified · 1 Parent(s): 4febf46

Update app.py

Files changed (1)
  1. app.py +18 -17
app.py CHANGED
@@ -4,23 +4,23 @@ from huggingface_hub import InferenceClient
 # Initialize the Hugging Face Inference Client
 client = InferenceClient()
 
-# Function to stream content for Math, STEM, and Code Generation
+# Function to stream content with Math, STEM, or Code Generation, including LaTeX
 def generate_stream(selected_topic, input_text):
     """
-    Generates dynamic lessons, solutions, or code snippets based on the selected topic.
-
+    Generates dynamic lessons, solutions, or code snippets with LaTeX-style formatting.
+
     Args:
         selected_topic (str): The selected subject (e.g., Math, STEM, or Code Generation).
         input_text (str): Additional input for contextual content generation.
 
     Yields:
-        str: Incremental output content.
+        str: Incremental content, including Markdown/LaTeX math formatting.
     """
     # Create a topic-specific prompt
     prompt = (
-        f"Generate a {selected_topic.lower()} lesson, problem, or example based on the following input: {input_text}"
-        if input_text.strip() else
-        f"Generate a beginner-level {selected_topic.lower()} lesson with examples."
+        f"Generate a {selected_topic.lower()} lesson, problem, or example with step-by-step explanations and LaTeX math formatting based on the following input: {input_text}"
+        if input_text.strip()
+        else f"Generate a beginner-level {selected_topic.lower()} lesson with examples and LaTeX math formatting."
     )
     messages = [{"role": "user", "content": prompt}]
 
@@ -32,7 +32,7 @@ def generate_stream(selected_topic, input_text):
         temperature=0.5,
         max_tokens=1024,
         top_p=0.7,
-        stream=True
+        stream=True,
     )
 
     # Stream the generated content incrementally
@@ -40,16 +40,18 @@ def generate_stream(selected_topic, input_text):
         for chunk in stream:
             generated_content += chunk.choices[0].delta.content
             yield generated_content  # Yield content incrementally
+
     except Exception as e:
         yield f"Error: {e}"  # Display error if any issues occur
 
+
 # Create the Gradio interface
 with gr.Blocks() as app:
     # App Title and Instructions
-    gr.Markdown("## 🎓 STEM Learning and Code Generator")
+    gr.Markdown("## 🎓 STEM Learning and Code Generator with LaTeX")
     gr.Markdown(
         "Get dynamic lessons, problem-solving examples, or code snippets for Math, STEM, "
-        "or Computer Science. Select a topic and get started!"
+        "or Computer Science. Includes LaTeX support for equations and step-by-step breakdowns!"
     )
 
     with gr.Row():
@@ -58,29 +60,28 @@ with gr.Blocks() as app:
            selected_topic = gr.Radio(
                choices=["Math", "STEM", "Computer Science (Code Generation)"],
                label="Select a Topic",
-               value="Math"  # Default selection
+               value="Math",  # Default selection
            )
            input_text = gr.Textbox(
                lines=2,
                label="Optional Input",
-               placeholder="Provide additional context (e.g., 'Explain calculus basics' or 'Generate Python code for sorting')."
+               placeholder="Provide additional context (e.g., 'Explain calculus basics' or 'Generate Python code for sorting').",
            )
            generate_button = gr.Button("Generate Content")
 
        # Output Section
        with gr.Column():
            gr.Markdown("### Generated Content")
-           output_stream = gr.Textbox(
-               lines=15,
-               label="Output",
-               interactive=False
+           output_stream = gr.Markdown(
+               placeholder="Generated content will appear here, with support for LaTeX math equations...",
+               interactive=False,
            )
 
    # Link the generate button to the streaming function
    generate_button.click(
        fn=generate_stream,
        inputs=[selected_topic, input_text],
-       outputs=output_stream
+       outputs=output_stream,
    )
 
    # Launch the Gradio app
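
For context, the @@ -32,7 +32,7 @@ hunk shows only the tail end of the inference call, so the sketch below illustrates how the changed parameters could sit inside a full streaming call. The chat_completion method, the placeholder model id, the helper name stream_lesson, and the try/except wiring are assumptions for illustration only; the elided lines of app.py may differ.

from huggingface_hub import InferenceClient

client = InferenceClient()

def stream_lesson(prompt):
    messages = [{"role": "user", "content": prompt}]
    try:
        # Hypothetical call shape; the real app.py may use a different method or model.
        stream = client.chat_completion(
            messages=messages,
            model="HuggingFaceH4/zephyr-7b-beta",  # placeholder model id, not taken from the diff
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7,
            stream=True,
        )
        generated_content = ""
        for chunk in stream:
            # Some chunks may carry no text in their delta, so fall back to an empty string.
            generated_content += chunk.choices[0].delta.content or ""
            yield generated_content  # Yield the accumulated text incrementally
    except Exception as e:
        yield f"Error: {e}"

Wired to the gr.Markdown output component from the last hunk, each yielded string replaces the component's content, so the text and any equations re-render progressively as the stream grows (exact LaTeX delimiter handling depends on the Gradio version).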