Transformer / app.py
import torch
from transformers import AutoTokenizer, AutoModel

# Load the SciBERT model and its tokenizer
tokenizer = AutoTokenizer.from_pretrained('allenai/scibert_scivocab_uncased')
model = AutoModel.from_pretrained('allenai/scibert_scivocab_uncased')
# Example text from a scientific paper
text = "This paper introduces a novel deep learning approach for cancer diagnosis."
# Tokenization & model forward pass (no gradients needed for inference)
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
# Print the shape of the token-level embeddings: (batch_size, sequence_length, hidden_size)
print(outputs.last_hidden_state.shape)
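
# --- Optional extension (illustrative sketch, not part of the original script) ---
# A common next step is to collapse the per-token hidden states into a single
# sentence embedding, e.g. via mean pooling over the non-padding tokens. The
# pooling below is one possible approach, not an official SciBERT API.
mask = inputs["attention_mask"].unsqueeze(-1)                # (1, seq_len, 1)
summed = (outputs.last_hidden_state * mask).sum(dim=1)       # sum over real tokens only
sentence_embedding = summed / mask.sum(dim=1)                # (1, hidden_size)
print(sentence_embedding.shape)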