# app.py
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModel

# Korean sentence-embedding model (KoSimCSE, RoBERTa-based)
model_name = "BM-K/KoSimCSE-roberta"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModel.from_pretrained(model_name)
model.eval()  # inference only; disable dropout

def embed(text):
    # Tokenize the input and run a forward pass without tracking gradients.
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
    with torch.no_grad():
        outputs = model(**inputs)
    # Use the [CLS] token's hidden state as the sentence embedding.
    embeddings = outputs.last_hidden_state[:, 0]
    return embeddings.tolist()

# Expose the embedding function as a simple text-in / text-out Gradio app.
iface = gr.Interface(fn=embed, inputs="text", outputs="text")
iface.launch()
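
Once app.py is running, the endpoint can be queried from another process with gradio_client, as in the minimal sketch below. The URL (Gradio's default http://127.0.0.1:7860) and the api_name="/predict" route are assumptions based on gr.Interface defaults; adjust them if launch() is configured differently.

# query_app.py — illustrative client sketch; assumes app.py is running locally
# on Gradio's default port and exposes the default "/predict" route.
from gradio_client import Client

client = Client("http://127.0.0.1:7860")                     # assumed local URL
result = client.predict("안녕하세요", api_name="/predict")   # single text input
print(result)  # stringified embedding list returned by embed()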