import gradio as gr
import torch
from transformers import pipeline

# Load the Hugging Face text-generation pipeline
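# Note: device_map="auto" requires the accelerate package to be installed,
# and torch.float16 is intended for GPU inference; on CPU-only hardware use the default dtype instead.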
pipe = pipeline(
    "text-generation",
    model="HuggingFaceH4/zephyr-7b-beta",
    device_map="auto",
    torch_dtype=torch.float16,
)

# Generate short comprehension questions for a coursebook passage
def generate_questions(text):
    prompt = f"Ask 5 short, simple comprehension questions about this English coursebook text:\n\n{text}"
    # return_full_text=False keeps the echoed prompt out of the returned string
    output = pipe(prompt, max_new_tokens=200, do_sample=True, return_full_text=False)[0]["generated_text"]
    return output.strip()

# Gradio Interface
demo = gr.Interface(fn=generate_questions, inputs="textbox", outputs="textbox", title="Coursebook Question Generator")
demo.launch()
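
# A minimal requirements.txt for this Space would likely include the packages below
# (an assumption for illustration, not taken from the original repo):
#   gradio
#   transformers
#   torch
#   accelerate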