```python
from vocab.gpt_35_turbo import tokenizer

# Inspect what token id 100256 maps to in the gpt-3.5-turbo vocabulary
print(tokenizer.decode([100256]))
print(tokenizer.convert_ids_to_tokens([100256]))
```
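For comparison, here is a minimal sketch that performs the same lookup with `tiktoken` directly, assuming gpt-3.5-turbo uses the cl100k_base encoding; this is an illustration rather than the repo's own `vocab` wrapper:

```python
import tiktoken

# Assumption: gpt-3.5-turbo uses the cl100k_base encoding.
enc = tiktoken.get_encoding("cl100k_base")

# In cl100k_base the mergeable ranks cover ids 0..100255 and the special
# tokens start at <|endoftext|> = 100257, so id 100256 sits in a gap
# with no byte mapping.
try:
    print(enc.decode_single_token_bytes(100256))
except KeyError:
    print("token id 100256 is not defined in cl100k_base")
```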