Rachel Bawden committed
Commit 5c06d8f
Parent: 82dbc5c

updated config

Files changed (1):
  config.json +4 -4
config.json CHANGED
@@ -66,7 +66,7 @@
     "torchscript": false,
     "transformers_version": "4.12.3",
     "use_bfloat16": false,
-    "vocab_size": 1001
+    "vocab_size": 1000
   },
   "decoder_attention_heads": 8,
   "decoder_ffn_dim": 1024,
@@ -93,11 +93,11 @@
   "num_hidden_layers": 2,
   "pad_token_id": 1,
   "scale_embedding": true,
-  "src_vocab_size": 1001,
-  "tgt_vocab_size": 1001,
+  "src_vocab_size": 1000,
+  "tgt_vocab_size": 1000,
   "tie_word_embeddings": true,
   "torch_dtype": "float32",
   "transformers_version": null,
   "use_cache": true,
-  "vocab_size": 1001
+  "vocab_size": 1000
 }
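
Taken together, the four changed lines drop every vocabulary-size field from 1001 to 1000: the vocab_size inside the nested sub-config (first hunk) and the top-level src_vocab_size, tgt_vocab_size, and vocab_size (second hunk). Below is a minimal sketch of a sanity check on the updated file, assuming a local clone of the repo; the helper name and file path are illustrative, and the recursive scan avoids guessing the key of the nested block, which this diff does not show:

import json

# Confirm no field in the updated config.json still carries the old
# vocabulary size of 1001. Assumes config.json from a local clone.
with open("config.json") as f:
    config = json.load(f)

def fields_with_value(obj, target, path=""):
    """Recursively yield dotted key paths whose value equals `target`."""
    if isinstance(obj, dict):
        for key, val in obj.items():
            sub_path = f"{path}.{key}" if path else key
            yield from fields_with_value(val, target, sub_path)
    elif obj == target:
        yield path

assert not list(fields_with_value(config, 1001)), "stale 1001 vocab size"
print(sorted(fields_with_value(config, 1000)))
# Expected to include src_vocab_size, tgt_vocab_size, vocab_size, and the
# nested sub-config's vocab_size (plus any other field that happens to be 1000).

Checking by value rather than by hard-coded key paths keeps the sketch robust to how this particular config nests its sub-config.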