baymax591 committed
Commit
8b982e2
1 Parent(s): b1c9acf

For detailed information on the error, please refer to https://huggingface.co/openbmb/MiniCPM-V/discussions/4.

Files changed (1)
  1. config.json +2 -0
config.json CHANGED
@@ -24,6 +24,7 @@
   "num_attention_heads": 36,
   "num_hidden_layers": 40,
   "num_key_value_heads": 36,
+  "patch_size": 14,
   "pretraining_tp": 1,
   "query_num": 64,
   "rms_norm_eps": 1e-05,
@@ -31,6 +32,7 @@
   "rope_theta": 10000.0,
   "scale_depth": 1.4,
   "scale_emb": 12,
+  "scale_resolution": 448,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.36.0",