breadlicker45 committed on
Commit
7be9f88
·
1 Parent(s): 68debcd

Upload 3 files

Browse files
Files changed (2) hide show
  1. config.json +5 -5
  2. pytorch_model.bin +1 -1
config.json CHANGED
@@ -3,13 +3,13 @@
3
  "architectures": [
4
  "GPT2LMHeadModel"
5
  ],
6
- "attn_pdrop": 0.0,
7
  "bos_token_id": 0,
8
- "embd_pdrop": 0.0,
9
  "eos_token_id": 0,
10
  "initializer_range": 0.02,
11
  "layer_norm_epsilon": 1e-05,
12
- "line_by_line": false,
13
  "model_type": "gpt2",
14
  "n_ctx": 4096,
15
  "n_embd": 960,
@@ -18,11 +18,11 @@
18
  "n_layer": 5,
19
  "n_positions": 4096,
20
  "reorder_and_upcast_attn": false,
21
- "resid_pdrop": 0.0,
22
  "scale_attn_by_inverse_layer_idx": false,
23
  "scale_attn_weights": true,
24
  "summary_activation": null,
25
- "summary_first_dropout": 0.0,
26
  "summary_proj_to_labels": true,
27
  "summary_type": "cls_index",
28
  "summary_use_proj": true,
 
3
  "architectures": [
4
  "GPT2LMHeadModel"
5
  ],
6
+ "attn_pdrop": 0,
7
  "bos_token_id": 0,
8
+ "embd_pdrop": 0,
9
  "eos_token_id": 0,
10
  "initializer_range": 0.02,
11
  "layer_norm_epsilon": 1e-05,
12
+ "line_by_line": true,
13
  "model_type": "gpt2",
14
  "n_ctx": 4096,
15
  "n_embd": 960,
 
18
  "n_layer": 5,
19
  "n_positions": 4096,
20
  "reorder_and_upcast_attn": false,
21
+ "resid_pdrop": 0,
22
  "scale_attn_by_inverse_layer_idx": false,
23
  "scale_attn_weights": true,
24
  "summary_activation": null,
25
+ "summary_first_dropout": 0,
26
  "summary_proj_to_labels": true,
27
  "summary_type": "cls_index",
28
  "summary_use_proj": true,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:523c56f1bc2c61f213080188701ba3f3ddb4ddb4aaef0fe337c060783ab98f09
3
  size 367059983
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:69138cb6a086c6f5f8331bb23ca7155845eb47e0ea922a63ffb0d15f1c45de4e
3
  size 367059983