import gradio as gr
from ABSA.inference import model, tokenizer, label2id, id2label
import torch


def predict(text):
    """Tag *text* with aspect-based sentiment labels, one label per subword token.

    Args:
        text: Raw input string (presumably Arabic — the model is project-supplied,
              so the expected language/script is not verifiable here).

    Returns:
        list[tuple[str, str]]: (token, label) pairs, aligned to the model's
        subword tokenization, excluding special tokens ([CLS]/[SEP]) and padding.
    """
    # No padding: padded positions would produce junk predictions that the
    # original code zipped against real tokens. The special-tokens mask lets
    # us drop [CLS]/[SEP] so labels line up 1:1 with visible tokens.
    inputs = tokenizer(
        text,
        return_tensors="pt",
        truncation=True,
        max_length=128,
        return_special_tokens_mask=True,
    )
    # Pop the mask before the forward pass — the model does not accept it.
    special_mask = inputs.pop("special_tokens_mask").squeeze(0).tolist()

    with torch.no_grad():
        outputs = model(**inputs)

    # Logits are (1, seq_len, num_labels); argmax over the label axis yields
    # the predicted label id per token. (The original passed raw float logits
    # to id2label, so every token fell through to the "O" default.)
    pred_ids = outputs["logits"].argmax(dim=-1).squeeze(0).tolist()

    # Derive tokens from the exact input_ids the model saw, so the
    # token/label alignment is guaranteed even with truncation.
    input_ids = inputs["input_ids"].squeeze(0).tolist()
    pairs = [
        (tokenizer.convert_ids_to_tokens(tok_id), id2label.get(pred, "O"))
        for tok_id, pred, special in zip(input_ids, pred_ids, special_mask)
        if special == 0  # keep only real (non-[CLS]/[SEP]) positions
    ]
    return pairs


if __name__ == "__main__":
    # Guarded so importing this module (e.g. for tests) does not start a server.
    gr.Interface(
        fn=predict,
        inputs="text",
        outputs="json",
        title="Arabic ABSA Model",
    ).launch()