akoksal's picture
Update app.py
a91e6de
raw
history blame
1.08 kB
import gradio as gr
from transformers import AutoTokenizer, pipeline
import torch
# Load the tokenizer and a text-generation pipeline for the LongForm-OPT-2.7B
# instruction-tuned model. Downloads the weights from the HuggingFace Hub on
# first run (a 2.7B-parameter model — this is slow and memory-heavy).
tokenizer = AutoTokenizer.from_pretrained("akoksal/LongForm-OPT-2.7B")
generate = pipeline('text-generation', model='akoksal/LongForm-OPT-2.7B', tokenizer=tokenizer)
def predict(instruction, topp, max_new_tokens, temperature):
    """Generate a LongForm model response for *instruction*.

    Appends the " [EOI]" end-of-instruction marker the LongForm models
    expect when the prompt does not already contain it, then samples a
    single continuation.

    Args:
        instruction: The user's prompt text.
        topp: Nucleus-sampling probability mass (passed as ``top_p``).
        max_new_tokens: Maximum number of tokens to generate.
        temperature: Sampling temperature.

    Returns:
        The generated text as returned by the pipeline (prompt included).
    """
    if "[EOI]" not in instruction:
        instruction = instruction + " [EOI]"
    # BUGFIX: the original passed both max_length=64 and max_new_tokens,
    # which conflict — max_length capped the *total* sequence at 64 tokens,
    # so the max_new_tokens slider (up to 512) had no effect. Passing only
    # max_new_tokens lets the slider actually control output length.
    result = generate(
        instruction,
        do_sample=True,
        top_p=topp,
        num_return_sequences=1,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
    )[0]["generated_text"]
    return result
# Build and launch the Gradio UI.
# BUGFIX: the original top_p slider ranged 0-3, but top_p is a probability
# mass and must lie in (0, 1] — values above 1 are invalid for nucleus
# sampling. The max_new_tokens slider also allowed 0, which would request
# no generation at all; its minimum is now 1.
iface = gr.Interface(
    fn=predict,
    inputs=[
        "text",
        gr.inputs.Slider(0, 1, default=0.90, label="top_p"),
        gr.inputs.Slider(1, 512, default=64, label="max_new_tokens"),
        gr.inputs.Slider(0, 1, default=1, label="temperature"),
    ],
    outputs="text",
)
iface.launch()