import gradio as gr
from huggingface_hub import InferenceClient
import tempfile

# Initialize the Hugging Face Inference Client
client = InferenceClient()
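# Note: InferenceClient reads credentials from the HF_TOKEN environment variable
# or a cached `huggingface-cli login` session; calling the hosted model below
# may require a token with access to the serverless Inference API.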

# Function to generate content dynamically
def generate_content(selected_topic, subtopic, complexity, input_text, examples_count, output_type):
    """
    Generate content dynamically based on user input with support for LaTeX and file downloads.

    Args:
        selected_topic (str): The selected topic (e.g., Math, STEM, Code Generation).
        subtopic (str): A specific subtopic for content generation.
        complexity (str): Expertise level (Beginner, Intermediate, Advanced).
        input_text (str): Additional context or problem to solve.
        examples_count (int): Number of examples or outputs to generate.
        output_type (str): Desired output format (Plain Text, LaTeX, Downloadable).

    Returns:
        tuple: Generated content and file path (if applicable).
    """
    # Create the prompt dynamically, appending the extra context only when provided
    if input_text.strip():
        prompt = (
            f"Generate {examples_count} {complexity.lower()}-level {selected_topic.lower()} "
            f"examples, lessons, or problems related to {subtopic}. Context: {input_text}"
        )
    else:
        prompt = (
            f"Generate {examples_count} {complexity.lower()}-level {selected_topic.lower()} "
            f"lessons or problems focused on {subtopic}."
        )

    try:
        # Generate content using the model
        messages = [{"role": "user", "content": prompt}]
        response = client.chat.completions.create(
            model="Qwen/Qwen2.5-Coder-32B-Instruct",
            messages=messages,
            temperature=0.5,
            max_tokens=1024,
            top_p=0.7
        )
        # Extract content from the response
        content = response.choices[0].message.content if response.choices else "No content generated."

        # Handle output formatting
        if output_type == "LaTeX":
            # Ensure LaTeX content is properly wrapped
            latex_content = f"$$\n{content.strip()}\n$$"
            return latex_content, None
        elif output_type == "Downloadable":
            # Save content to a temporary file so Gradio can serve it for download
            with tempfile.NamedTemporaryFile(
                mode="w", delete=False, suffix=".txt", encoding="utf-8"
            ) as temp_file:
                temp_file.write(content)
            return "File generated successfully. Use the download button.", temp_file.name
        else:
            # Default to plain text
            return content, None
    except Exception as e:
        # Catch and return any errors
        return f"Error during content generation: {e}", None
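
# Example of calling generate_content directly (hypothetical values, for quick
# local testing outside the UI; requires working Inference API credentials):
#   text, path = generate_content("Math", "Algebra", "Beginner", "", 2, "Plain Text")
#   print(text)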


# Create the Gradio interface
with gr.Blocks() as app:
    # App Title and Description
    gr.Markdown("## 🌟 Advanced STEM and Code Generator with LaTeX and File Downloads")

    with gr.Row():
        # Input Section
        with gr.Column():
            selected_topic = gr.Radio(
                choices=["Math", "STEM", "Code Generation"],
                label="Select a Topic",
                value="Math"
            )
            subtopic = gr.Textbox(
                label="Subtopic",
                placeholder="E.g., Algebra, Physics, Sorting Algorithms"
            )
            complexity = gr.Radio(
                choices=["Beginner", "Intermediate", "Advanced"],
                label="Expertise Level",
                value="Beginner"
            )
            input_text = gr.Textbox(
                label="Additional Context",
                placeholder="E.g., 'Explain integration basics' or 'Generate Python code for searching.'",
                lines=3
            )
            examples_count = gr.Slider(
                minimum=1,
                maximum=5,
                step=1,
                label="Number of Examples",
                value=1
            )
            output_type = gr.Radio(
                choices=["Plain Text", "LaTeX", "Downloadable"],
                label="Output Format",
                value="Plain Text"
            )
            generate_button = gr.Button("Generate Content")

        # Output Section
        with gr.Column():
            gr.Markdown("### πŸ“ Generated Output (Supports LaTeX)")
            output_text = gr.Markdown(label="Generated Content")
            download_button = gr.File(label="Download File (if applicable)")

    # Connect the generate function to the button. generate_content already
    # returns (content, file_path), which maps directly onto the two outputs.
    generate_button.click(
        fn=generate_content,
        inputs=[selected_topic, subtopic, complexity, input_text, examples_count, output_type],
        outputs=[output_text, download_button]
    )

    # Feedback Section
    feedback_label = gr.Label(value="Was this content helpful?")
    feedback_rating = gr.Radio(
        choices=["Yes", "No"],
        label="Feedback",
        value="Yes"
    )
    feedback_button = gr.Button("Submit Feedback")

    def collect_feedback(feedback):
        return f"Thank you for your feedback: {feedback}"

    feedback_button.click(
        fn=collect_feedback,
        inputs=[feedback_rating],
        outputs=[feedback_label]
    )

# Launch the Gradio app
app.launch()
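
# Running locally (a sketch, assuming the dependencies are installed and you
# have a valid Hugging Face token):
#   pip install gradio huggingface_hub
#   HF_TOKEN=<your token> python app.py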