nintwentydo committed on
Commit
421f7db
·
verified ·
1 Parent(s): 08490ac

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +5 -2
config.json CHANGED
@@ -8,6 +8,7 @@
8
  "image_token_index": 10,
9
  "model_type": "llava",
10
  "projector_hidden_act": "gelu",
 
11
  "quantization_config": {
12
  "config_groups": {
13
  "group_0": {
@@ -677,11 +678,13 @@
677
  "head_dim": 128,
678
  "is_composition": true,
679
  "max_position_embeddings": 1024000,
 
680
  "model_type": "mistral",
681
  "num_hidden_layers": 40,
 
682
  "num_key_value_heads": 8,
683
- "rms_norm_eps": 0.00001,
684
- "rope_theta": 1000000000,
685
  "sliding_window": null,
686
  "vocab_size": 131072
687
  },
 
8
  "image_token_index": 10,
9
  "model_type": "llava",
10
  "projector_hidden_act": "gelu",
11
+ "initializer_range": 0.02,
12
  "quantization_config": {
13
  "config_groups": {
14
  "group_0": {
 
678
  "head_dim": 128,
679
  "is_composition": true,
680
  "max_position_embeddings": 1024000,
681
+ "is_composition": true,
682
  "model_type": "mistral",
683
  "num_hidden_layers": 40,
684
+ "num_attention_heads": 32,
685
  "num_key_value_heads": 8,
686
+ "rms_norm_eps": 1e-05,
687
+ "rope_theta": 1000000000.0,
688
  "sliding_window": null,
689
  "vocab_size": 131072
690
  },