File size: 126 Bytes
f4973d4
 
 
 
 
 
1
2
3
4
5
6

from vocab.gpt_35_turbo import tokenizer

# Probe how the GPT-3.5-turbo tokenizer handles token id 100256:
# first as decoded text, then as the raw token string(s).
# NOTE(review): 100256 sits just below the cl100k_base special-token
# range (100257+) — presumably chosen to test an unassigned/reserved
# id; confirm against the vocab definition.
probe_ids = [100256]

decoded_text = tokenizer.decode(probe_ids)
print(decoded_text)

raw_tokens = tokenizer.convert_ids_to_tokens(probe_ids)
print(raw_tokens)