lhallee committed
Commit 1ffa8b1 · verified · 1 Parent(s): 1048eed

Upload FastEsmForMaskedLM

Files changed (1):
config.json +1 -0
config.json CHANGED
@@ -28,6 +28,7 @@
   "num_hidden_layers": 36,
   "pad_token_id": 1,
   "position_embedding_type": "rotary",
+  "tie_word_embeddings": false,
   "token_dropout": true,
   "torch_dtype": "float32",
   "transformers_version": "4.47.1",