bms18 committed on
Commit 9ea0017
Parent(s): dda0aa0

Training in progress, epoch 1

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "facebook/esm2_t30_150M_UR50D",
+  "_name_or_path": "facebook/esm2_t6_8M_UR50D",
   "architectures": [
     "EsmForSequenceClassification"
   ],
@@ -9,25 +9,19 @@
   "esmfold_config": null,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
-  "hidden_size": 640,
-  "id2label": {
-    "0": "LABEL_0"
-  },
+  "hidden_size": 320,
   "initializer_range": 0.02,
-  "intermediate_size": 2560,
+  "intermediate_size": 1280,
   "is_folding_model": false,
-  "label2id": {
-    "LABEL_0": 0
-  },
   "layer_norm_eps": 1e-05,
   "mask_token_id": 32,
   "max_position_embeddings": 1026,
   "model_type": "esm",
   "num_attention_heads": 20,
-  "num_hidden_layers": 30,
+  "num_hidden_layers": 6,
   "pad_token_id": 1,
   "position_embedding_type": "rotary",
-  "problem_type": "regression",
+  "problem_type": "multi_label_classification",
   "token_dropout": true,
   "torch_dtype": "float32",
   "transformers_version": "4.39.3",
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:817378bead6ec0b3a20e40e72e27df2ba89e090c5743f909a6ec171deae00811
-size 595246640
+oid sha256:baa8b862aedecaf6669617f3456b04b9e435007f0142a00713c42b85839b81e6
+size 31375788
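
The LFS pointer records only a hash and a byte count; the checkpoint shrinks from ~595 MB to ~31 MB, consistent with float32 weights for the ~8M-parameter backbone replacing the ~150M-parameter one. A sketch of a sanity check, assuming the safetensors package and a local copy of the file:

# Sketch: confirm the payload size matches the float32 parameter count.
# Assumes "checkpoint/model.safetensors" exists locally (hypothetical path).
from safetensors import safe_open

total_bytes = 0
with safe_open("checkpoint/model.safetensors", framework="pt") as f:
    for name in f.keys():
        t = f.get_tensor(name)
        total_bytes += t.numel() * t.element_size()

print(total_bytes)  # ~31 MB for the 8M model vs. ~595 MB for the 150M model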
runs/Jul28_16-53-18_DESKTOP-M4AOP5S/events.out.tfevents.1722181999.DESKTOP-M4AOP5S.15308.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:717ce87c3769811709136f7fd975879bef597f33efb483b48b21d1f9d713f8ea
+size 4682
runs/Jul28_16-56-32_DESKTOP-M4AOP5S/events.out.tfevents.1722182193.DESKTOP-M4AOP5S.15308.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:292395bf7ff036950308c2916e3ed55d3c3ac8e19f7e23d446fa17b7e2380bee
+size 4682
runs/Jul28_16-57-48_DESKTOP-M4AOP5S/events.out.tfevents.1722182268.DESKTOP-M4AOP5S.15308.2 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3c26c67baca49a913ab0da9f872c9d038b7c224db58983d56f4d7aa8652710f8
+size 5063
tokenizer_config.json CHANGED
@@ -45,7 +45,7 @@
   "cls_token": "<cls>",
   "eos_token": "<eos>",
   "mask_token": "<mask>",
-  "model_max_length": 1000000000000000019884624838656,
+  "model_max_length": 1024,
   "pad_token": "<pad>",
   "tokenizer_class": "EsmTokenizer",
   "unk_token": "<unk>"
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:33d24373f7936c0b9d17a25a934239c7fd92bfa122fae23690bbbce815725efa
+oid sha256:2bc7710f28812e5cf9aa4f21eaaf3b29d23d5b2d114f3540a929acde9323c200
 size 4920