import gradio as gr
from huggingface_hub import InferenceClient
# Initialize the Hugging Face Inference Client
client = InferenceClient()
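# Note: with no token passed explicitly, InferenceClient falls back to the HF_TOKEN
# environment variable or the locally cached Hugging Face credentials (an assumption
# about how this Space / local environment is configured).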
# Function to stream content with Math, STEM, or Code Generation, including LaTeX
def generate_stream(selected_topic, input_text):
"""
Generates dynamic lessons, solutions, or code snippets with LaTeX-style formatting.
Args:
selected_topic (str): The selected subject (e.g., Math, STEM, or Code Generation).
input_text (str): Additional input for contextual content generation.
Yields:
str: Incremental content, including Markdown/LaTeX math formatting.
"""
# Create a topic-specific prompt
prompt = (
f"Generate a {selected_topic.lower()} lesson, problem, or example with step-by-step explanations and LaTeX math formatting based on the following input: {input_text}"
if input_text.strip()
else f"Generate a beginner-level {selected_topic.lower()} lesson with examples and LaTeX math formatting."
)
messages = [{"role": "user", "content": prompt}]
    try:
        # Create a stream for generating content
        stream = client.chat.completions.create(
            model="Qwen/Qwen2.5-Coder-32B-Instruct",  # Streaming model
            messages=messages,
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7,
            stream=True,
        )

        # Stream the generated content incrementally
        generated_content = ""
        for chunk in stream:
            delta = chunk.choices[0].delta.content
            if delta:  # Some chunks (e.g., the final one) may carry no text
                generated_content += delta
            yield generated_content  # Yield the accumulated content so far
    except Exception as e:
        yield f"Error: {e}"  # Surface any API or connection error in the UI instead of crashing
# Create the Gradio interface
with gr.Blocks() as app:
    # App Title and Instructions
    gr.Markdown("## πŸŽ“ STEM Learning and Code Generator with LaTeX")
    gr.Markdown(
        "Get dynamic lessons, problem-solving examples, or code snippets for Math, STEM, "
        "or Computer Science. Includes LaTeX support for equations and step-by-step breakdowns!"
    )
    with gr.Row():
        # Input Section
        with gr.Column():
            selected_topic = gr.Radio(
                choices=["Math", "STEM", "Computer Science (Code Generation)"],
                label="Select a Topic",
                value="Math",  # Default selection
            )
            input_text = gr.Textbox(
                lines=2,
                label="Optional Input",
                placeholder="Provide additional context (e.g., 'Explain calculus basics' or 'Generate Python code for sorting').",
            )
            generate_button = gr.Button("Generate Content")

        # Output Section
        with gr.Column():
            gr.Markdown("### Generated Content")
            output_stream = gr.Markdown()  # Renders the streamed Markdown/LaTeX content

    # Link the generate button to the streaming function
    generate_button.click(
        fn=generate_stream,
        inputs=[selected_topic, input_text],
        outputs=output_stream,
    )
# Launch the Gradio app
app.launch()
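
# To run this app locally (assuming `gradio` and `huggingface_hub` are installed and a
# Hugging Face token is available), execute: python app.py
# app.launch() also accepts options such as share=True for a temporary public link.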