yuchenxie committed (verified)
Commit b761247 · 1 Parent(s): fcc8952

Update config.json

Files changed (1): config.json (+19 −2)
config.json CHANGED

@@ -2,6 +2,7 @@
   "model_type": "ArlowGPT",
   "clip_config": {
     "architectures": ["CLIPModel"],
+    "_name_or_path": "yuchenxie/ArlowGPT-VLM-Untrained",
     "hidden_size": 1024,
     "intermediate_size": 4096,
     "num_hidden_layers": 24,
@@ -11,7 +12,15 @@
     "vision_patch_size": 14,
     "projection_dim": 768,
     "max_position_embeddings": 77,
-    "image_size": 224
+    "image_size": 224,
+    "vision_config_dict": {
+      "hidden_size": 1024,
+      "intermediate_size": 4096,
+      "num_attention_heads": 16,
+      "num_hidden_layers": 24,
+      "patch_size": 14,
+      "projection_dim": 768
+    }
   },
   "gpt2_config": {
     "architectures": ["GPT2LMHeadModel"],
@@ -25,7 +34,15 @@
     "resid_pdrop": 0.1,
     "embd_pdrop": 0.1,
     "layer_norm_epsilon": 1e-05,
-    "initializer_range": 0.02
+    "initializer_range": 0.02,
+    "activation_function": "gelu_new",
+    "task_specific_params": {
+      "text-generation": {
+        "do_sample": true,
+        "max_length": 50
+      }
+    },
+    "transformers_version": "4.47.0"
   },
   "projection_dim": 768,
   "transformers_version": "4.47.0",