Spaces:
Runtime error
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
device = "cuda" if torch.cuda.is_available() else "cpu" | |
model_path = "ibm-granite/granite-3b-code-base" | |
tokenizer = AutoTokenizer.from_pretrained(model_path) | |
model = AutoModelForCausalLM.from_pretrained(model_path, device_map=device) | |
model.eval() | |
def generate_code(input_text):
    # Tokenize the prompt and move the tensors to the target device
    input_tokens = tokenizer(input_text, return_tensors="pt")
    for key in input_tokens:
        input_tokens[key] = input_tokens[key].to(device)
    # Generate a completion and decode it back to text
    output = model.generate(**input_tokens, max_new_tokens=200)
    output_text = tokenizer.batch_decode(output, skip_special_tokens=True)[0]
    return output_text
# Gradio interface; gr.Textbox replaces the removed gr.inputs.Textbox API
iface = gr.Interface(
    fn=generate_code,
    inputs=gr.Textbox(lines=2, placeholder="Enter code snippet here..."),
    outputs="text",
)
iface.launch()
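
For a quick check outside the web UI, the generation function can be smoke-tested directly from a Python shell before launching the interface; a minimal sketch, where the prompt string is only an illustrative example:

# Hypothetical smoke test: complete a short prompt with the loaded model
print(generate_code("def fibonacci(n):"))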