import gradio as gr
from transformers import GPTNeoForCausalLM, GPT2Tokenizer, set_seed

# Load the 1.3B-parameter GPT-Neo model; GPT-Neo reuses the GPT-2 tokenizer.
model = GPTNeoForCausalLM.from_pretrained("EleutherAI/gpt-neo-1.3B")
tokenizer = GPT2Tokenizer.from_pretrained("EleutherAI/gpt-neo-1.3B")

# Fix the random seed so sampled generations are reproducible across runs.
set_seed(42)


# "Bemenet" is Hungarian for "input": takes a prompt string and returns generated text.
def Bemenet(bemenet):
    # Tokenize the prompt and sample a continuation of up to 100 tokens.
    input_ids = tokenizer(bemenet, return_tensors="pt").input_ids

    gen_tokens = model.generate(
        input_ids,
        do_sample=True,
        temperature=0.9,
        max_length=100,
    )
    # Decode the generated token ids back into a single string.
    return tokenizer.batch_decode(gen_tokens)[0]

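# Optional quick check of the generation function before launching the UI;
# the example prompt here is illustrative only.
# print(Bemenet("GPT-Neo is"))
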
# Wrap the generation function in a simple text-in / text-out Gradio interface.
interface = gr.Interface(fn=Bemenet,
                         title="Title..",
                         description="Description..",
                         inputs="text",
                         outputs="text")

interface.launch()
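# Note: interface.launch(share=True) would additionally create a temporary
# public link, which is handy when running from a notebook.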