Cheng98 committed
Commit 288d97a
1 Parent(s): bf32b61

Upload config

Files changed (1)
  1. config.json +6 -6
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "facebook/opt-125m",
+  "_name_or_path": "./checkpoints/opt-125m-qnli",
   "_remove_final_layer_norm": false,
   "activation_dropout": 0.0,
   "activation_function": "relu",
@@ -13,16 +13,16 @@
   "enable_bias": true,
   "eos_token_id": 2,
   "ffn_dim": 3072,
-  "finetuning_task": "text-classification",
+  "finetuning_task": "qnli",
   "hidden_size": 768,
   "id2label": {
-    "0": "0",
-    "1": "1"
+    "0": "entailment",
+    "1": "not_entailment"
   },
   "init_std": 0.02,
   "label2id": {
-    "0": 0,
-    "1": 1
+    "entailment": 0,
+    "not_entailment": 1
   },
   "layer_norm_elementwise_affine": true,
   "layerdrop": 0.0,