import gradio as gr
from transformers import AutoTokenizer, pipeline
import torch


# Load the LongForm-OPT-2.7B model and its tokenizer once at startup;
# the text-generation pipeline handles tokenization, generation, and decoding.
tokenizer = AutoTokenizer.from_pretrained("akoksal/LongForm-OPT-2.7B")
generate = pipeline("text-generation", model="akoksal/LongForm-OPT-2.7B", tokenizer=tokenizer)


def predict(instruction, topp, max_new_tokens, temperature):
    # LongForm prompts end with the special end-of-instruction marker [EOI];
    # append it if the user did not include it.
    if "[EOI]" not in instruction:
        instruction = instruction + " [EOI]"
    # Sample a single continuation. max_new_tokens alone caps the output length
    # (the original max_length=64 conflicted with it and is dropped).
    output = generate(
        instruction,
        do_sample=True,
        top_p=topp,
        num_return_sequences=1,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
    )[0]["generated_text"]
    return output
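
# Illustrative usage (an assumed example, not part of the Space itself): calling
# predict() directly with the slider defaults. Output is sampled, so exact text varies,
# and the pipeline returns the prompt followed by the generated continuation.
#
#   predict("Write an essay about meditation.", 0.9, 64, 1.0)
#   # -> "Write an essay about meditation. [EOI] Meditation is ..."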

iface = gr.Interface(
    fn=predict,
    inputs=[
        "text",
        # top_p must lie in (0, 1] and temperature must be strictly positive when
        # sampling, so the sliders start just above 0 and top_p is capped at 1.
        gr.Slider(0.01, 1.0, value=0.9, label="top_p"),
        gr.Slider(1, 512, value=64, step=1, label="max_new_tokens"),
        gr.Slider(0.01, 1.0, value=1.0, label="temperature"),
    ],
    outputs="text",
)
iface.launch()