# nordic-roberta-wiki / save_model.py
# birgermoell's picture
# Model params
# e22bb9d
# raw | history | blame
# 575 Bytes
"""Convert a Flax RoBERTa checkpoint to PyTorch and save it, plus the tokenizer.

Reads the model config and ``flax_model.msgpack`` from the current directory,
loads the Flax weights into a freshly constructed PyTorch ``RobertaModel``,
and writes the PyTorch weights and tokenizer files back to the same directory.
"""
from transformers import AutoTokenizer, RobertaConfig, RobertaModel
from transformers.modeling_flax_pytorch_utils import load_flax_checkpoint_in_pytorch_model

# Build an (untrained) PyTorch model from the local config, then overwrite its
# weights with the Flax checkpoint converted in place.
config = RobertaConfig.from_pretrained("./")
model = RobertaModel(config)
load_flax_checkpoint_in_pytorch_model(model, "./flax_model.msgpack")
model.save_pretrained("./")  # writes pytorch_model.bin + config.json

# Round-trip the tokenizer so its files are (re)written next to the model.
tokenizer = AutoTokenizer.from_pretrained("./")
tokenizer.save_pretrained("./")
# Example return value of ``tokenizer.save_pretrained("./")``:
# ('./tokenizer_config.json',
#  './special_tokens_map.json',
#  './vocab.json',
#  './merges.txt',
#  './added_tokens.json',
#  './tokenizer.json')