from transformers import AutoTokenizer, RobertaConfig, RobertaModel
from transformers.modeling_flax_pytorch_utils import load_flax_checkpoint_in_pytorch_model

# Build a PyTorch RoBERTa model from the local config and load the Flax weights into it,
# then save the converted checkpoint back into the same directory.
config = RobertaConfig.from_pretrained("./")
model = RobertaModel(config)
load_flax_checkpoint_in_pytorch_model(model, "./flax_model.msgpack")
model.save_pretrained("./")

# Re-save the tokenizer so all tokenizer files sit alongside the converted checkpoint.
tokenizer = AutoTokenizer.from_pretrained("./")
tokenizer.save_pretrained("./")
# save_pretrained returns the paths of the files it wrote, e.g.:
# ('./tokenizer_config.json', './special_tokens_map.json', './vocab.json',
#  './merges.txt', './added_tokens.json', './tokenizer.json')
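
# Optional sanity check (a minimal sketch, not part of the original conversion script):
# reload the converted PyTorch checkpoint and run a tokenized input through it to
# confirm the saved weights are usable. Assumes the conversion above completed and
# left the PyTorch weights plus the tokenizer files in the current directory.
import torch

pt_model = RobertaModel.from_pretrained("./")
pt_tokenizer = AutoTokenizer.from_pretrained("./")
inputs = pt_tokenizer("Hello world", return_tensors="pt")
with torch.no_grad():
    outputs = pt_model(**inputs)
print(outputs.last_hidden_state.shape)  # (batch_size, sequence_length, hidden_size)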