from transformers import BertTokenizerFast
from .tokenization import NewTokenizer

class NewTokenizerFast(BertTokenizerFast):
    # Point the fast tokenizer at its slow (pure-Python) counterpart.
    slow_tokenizer_class = NewTokenizer
    # Custom marker attribute (not a transformers API field), handy for
    # checking that this custom class was actually loaded.
    special_attribute_present = True
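
# Usage sketch (illustrative, not part of the original file): when this module
# ships in a Hub repo whose tokenizer_config.json registers the class via
# auto_map, it can be loaded with trust_remote_code. The repo id below is a
# placeholder.
#
#     from transformers import AutoTokenizer
#     tokenizer = AutoTokenizer.from_pretrained("user/new-model", trust_remote_code=True)
#     print(tokenizer("Hello world").input_ids)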