NeonBohdan committed
Commit 75b86f7
Parent: 6b8ad37

Max seq len = 8192

Files changed (2)
  1. config.json +1 -1
  2. tokenizer_config.json +1 -1
config.json CHANGED
@@ -10,7 +10,7 @@
   "hidden_size": 4096,
   "initializer_range": 0.02,
   "intermediate_size": 14336,
-  "max_position_embeddings": 32768,
+  "max_position_embeddings": 8192,
   "model_type": "mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
tokenizer_config.json CHANGED
@@ -290,7 +290,7 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "<|im_end|>",
   "legacy": true,
-  "model_max_length": 1000000000000000019884624838656,
+  "model_max_length": 8192,
   "pad_token": "</s>",
   "sp_model_kwargs": {},
   "spaces_between_special_tokens": false,