File size: 514 Bytes
5ce5b99
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
import torch
from transformers import AutoTokenizer, AutoModel

# SciBERT: a BERT variant pretrained on scientific text (Allen AI).
MODEL_NAME = "allenai/scibert_scivocab_uncased"

# Load tokenizer and model weights (downloads on first run, then cached).
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModel.from_pretrained(MODEL_NAME)
# Switch to inference mode: disables dropout so outputs are deterministic.
model.eval()

# Example sentence from a paper abstract.
text = "This paper introduces a novel deep learning approach for cancer diagnosis."

# Tokenize and run a forward pass; no_grad avoids building the autograd
# graph, which is pure overhead when we only read the activations.
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Shape is (batch_size, sequence_length, hidden_size) — presumably
# hidden_size=768 for this BERT-base-sized checkpoint; confirm on first run.
print(outputs.last_hidden_state.shape)