import gradio as gr
import transformers

# Load the ejschwartz/slade-x86-O3 text2text model (takes x86 assembly as input).
pipe = transformers.pipeline("text2text-generation", model="ejschwartz/slade-x86-O3")

def predict(asm):
    # The pipeline returns a list of dicts; return just the generated text.
    return pipe(asm, max_length=1024)[0]["generated_text"]

demo = gr.Interface(fn=predict, inputs="text", outputs="text")
demo.queue()   # queue requests so concurrent users are served in order
demo.launch()