from transformers import AutoTokenizer

# Load the Kannada-extended LLaMA tokenizer from the Hugging Face Hub
# and print its configuration (vocab size, special tokens, etc.).
tokenizer = AutoTokenizer.from_pretrained('Tensoic/Kan-LLaMA-7B-base')
print(tokenizer)

# Tokenize a sample Kannada phrase ("doing" / "to do") and map the
# resulting input ids back to their token strings for inspection.
st = "ಮಾಡುವುದು ಮಾಡಲು"
print(tokenizer.convert_ids_to_tokens(tokenizer(st)['input_ids']))
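
# Illustrative addition, not part of the original script: round-trip the
# input ids back through tokenizer.decode() (a standard transformers
# tokenizer method) to check that the Kannada text is reconstructed
# losslessly from the tokens.
ids = tokenizer(st)['input_ids']
print(tokenizer.decode(ids, skip_special_tokens=True))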