Luis Oala
Update app.py
b936af7
raw
history blame
1.61 kB
#TODO: integrate into markdown
# Module-level setup: imports and one-time model load for the Gradio demo.
import gradio as gr
from gradio import mix
from transformers import pipeline, set_seed
#title = "trustworthy artificial intelligence workshop - content generator"
# NOTE: `description` is currently unused — it is commented out in the
# Interface(...) call below.
description = "based on the gpt2 demo interface by <a href='https://huggingface.co/spaces/docs-demos/gpt2/tree/main'>ahsen khaliq</a>"
#io1 = gr.Interface.load("huggingface/distilgpt2")
# Load GPT-2 once at import time; this downloads the model on first run
# and is shared by every call to inference() below.
generator = pipeline('text-generation', model='gpt2')
#io2 = gr.Interface.load("huggingface/gpt2-large")
#io3 = gr.Interface.load("huggingface/gpt2-medium")
#io4 = gr.Interface.load("huggingface/gpt2-xl")
def inference(text, seed):
    """Generate a GPT-2 continuation of *text* with a reproducible seed.

    Parameters
    ----------
    text : str
        Prompt to continue.
    seed : float or int
        RNG seed (the Gradio slider supplies a float; it is truncated
        to int before seeding).

    Returns
    -------
    str
        The generated text, prompt included.
    """
    set_seed(int(seed))
    # The text-generation pipeline returns a LIST of dicts, one per
    # returned sequence — index [0] first, then take 'generated_text'.
    # (The original indexed the list directly with a string key, which
    # raises TypeError at runtime.)
    outputs = generator(text, max_length=100, num_return_sequences=1)
    return outputs[0]['generated_text']
# Wire the UI: a single-choice prompt plus a numeric seed slider feeding
# inference(), with the generated text shown in a textbox, then launch.
# NOTE(review): gr.inputs / gr.outputs are the pre-3.x Gradio namespaces —
# confirm the pinned gradio version still ships them.
gr.Interface(
    inference,
    [gr.inputs.Radio(choices=["trustworthy artificial intelligence"], label="input"), gr.inputs.Slider(minimum=0., maximum=1000.,label="seed")],
    #,gr.inputs.Dropdown(choices=["distilgpt2","gpt2-medium","gpt2-large","gpt2-xl"], type="value", default="gpt2-medium", label="model")],
    gr.outputs.Textbox(label="gpt-2 proposal"),
    #title=title,
    #description=description,
    # NOTE(review): cache_examples=True with no examples= list — presumably a
    # no-op; verify. allow_flagging is normally an Interface kwarg, not a
    # launch() kwarg — confirm it is honored here.
    cache_examples=True).launch(enable_queue=True,
                                allow_flagging="manual")
#TODO: add credits at bottom