Sifal committed · Commit fc969d6 (verified) · Parent: 15c9d82

Upload config

Files changed (1):
  config.json (+4, -5)
config.json CHANGED

@@ -6,11 +6,10 @@
   ],
   "attention_probs_dropout_prob": 0.1,
   "auto_map": {
-    "AutoConfig": "configuration_bert.BertConfig",
-    "AutoModelForSequenceClassification":"automodel.ClinicalMosaicForSequenceClassification",
-    "AutoModel": "automodel.ClinicalMosaicForEmbeddingGeneration"
+    "AutoConfig": "Sifal/ClinicalMosaic--configuration_bert.BertConfig",
+    "AutoModel": "Sifal/ClinicalMosaic--automodel.ClinicalMosaicForEmbeddingGeneration",
+    "AutoModelForSequenceClassification": "Sifal/ClinicalMosaic--automodel.ClinicalMosaicForSequenceClassification"
   },
-
   "classifier_dropout": null,
   "gradient_checkpointing": false,
   "hidden_act": "gelu",
@@ -26,7 +25,7 @@
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "transformers_version": "4.41.1",
+  "transformers_version": "4.48.3",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 30528