# Generate and save a GPT-2 configuration with all dropout disabled.
from transformers import GPT2Config

# Output directory for the saved configuration files.
model_dir = "./sihala-gpt2"

# Start from the stock "gpt2" configuration but zero out every dropout
# probability (residual, embedding, attention) — useful e.g. for
# deterministic fine-tuning or inference setups.
_dropout_overrides = {
    "resid_pdrop": 0.0,
    "embd_pdrop": 0.0,
    "attn_pdrop": 0.0,
}
config = GPT2Config.from_pretrained("gpt2", **_dropout_overrides)
config.save_pretrained(model_dir)