yangheng committed
Commit
4233b69
1 Parent(s): dae27f9

Update config.json

Files changed (1):
  1. config.json  +18 -9
config.json CHANGED
@@ -2,24 +2,33 @@
   "OmniGenomefold_config": null,
   "_name_or_path": "./",
   "architectures": [
-    "OmniGenomeForTokenClassification"
+    "OmniGenomeModel",
+    "OmniGenomeForTokenClassification",
+    "OmniGenomeForMaskedLM",
+    "OmniGenomeModelForSeq2SeqLM",
+    "OmniGenomeForTSequenceClassification",
+    "OmniGenomeForTokenClassification",
+    "OmniGenomeForSeq2SeqLM"
   ],
   "attention_probs_dropout_prob": 0.0,
-  "auto_map": {
-    "AutoConfig": "configuration_omnigenome.OmniGenomeConfig",
-    "AutoModel": "modeling_omnigenome.OmniGenomeModel",
-    "AutoModelForMaskedLM": "modeling_omnigenome.OmniGenomeForMaskedLM",
-    "AutoModelForSeq2SeqLM": "modeling_omnigenome.OmniGenomeForSeq2SeqLM",
-    "AutoModelForTokenClassification": "modeling_omnigenome.OmniGenomeForTokenClassification"
-  },
   "classifier_dropout": null,
   "emb_layer_norm_before": false,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0,
   "hidden_size": 720,
+  "id2label": {
+    "0": "(",
+    "1": ")",
+    "2": "."
+  },
   "initializer_range": 0.02,
   "intermediate_size": 2560,
   "is_folding_model": false,
+  "label2id": {
+    "(": 0,
+    ")": 1,
+    ".": 2
+  },
   "layer_norm_eps": 1e-05,
   "mask_token_id": 23,
   "max_position_embeddings": 1026,
@@ -32,7 +41,7 @@
   "position_embedding_type": "rotary",
   "token_dropout": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.38.0.dev0",
+  "transformers_version": "4.41.0.dev0",
   "use_cache": true,
   "vocab_list": null,
   "vocab_size": 24