Rocketknight1 (HF staff) committed
Commit 5fbb665 · 1 Parent(s): 615ebad

Training in progress, epoch 1

config.json CHANGED
@@ -1,16 +1,18 @@
 {
-  "_name_or_path": "Rocketknight1/esm2_t12_35M_UR50D",
+  "_name_or_path": "facebook/esm2_t12_35M_UR50D",
   "architectures": [
     "EsmForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.0,
   "classifier_dropout": null,
   "emb_layer_norm_before": false,
+  "esmfold_config": null,
   "hidden_act": "gelu",
   "hidden_dropout_prob": 0.0,
   "hidden_size": 480,
   "initializer_range": 0.02,
   "intermediate_size": 1920,
+  "is_folding_model": false,
   "layer_norm_eps": 1e-05,
   "mask_token_id": 32,
   "max_position_embeddings": 1026,
@@ -22,7 +24,8 @@
   "problem_type": "single_label_classification",
   "token_dropout": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.23.0.dev0",
+  "transformers_version": "4.25.0.dev0",
   "use_cache": true,
+  "vocab_list": null,
   "vocab_size": 33
 }
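The config.json changes above point `_name_or_path` at the upstream base model `facebook/esm2_t12_35M_UR50D`, bump `transformers_version` to 4.25.0.dev0, and add the newer ESM config fields (`esmfold_config`, `is_folding_model`, `vocab_list`). A minimal sketch of loading the resulting checkpoint; the local path is a placeholder and not part of this commit:

```python
# Minimal sketch, not part of this commit: load the fine-tuned ESM-2 sequence
# classifier from a local checkout of this repo. "path/to/repo" is a placeholder.
from transformers import AutoTokenizer, EsmForSequenceClassification

checkpoint = "path/to/repo"  # placeholder: local clone of this model repo
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = EsmForSequenceClassification.from_pretrained(checkpoint)

print(model.config.hidden_size)   # 480, per config.json above
print(model.config.problem_type)  # "single_label_classification"
```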
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7613d148dc9a3f11a459b9d024f59c36eea21e772d6f310e7c6a21abb835ac66
-size 136054689
+oid sha256:f571c39823477a6bde632167d7e2af830e0450278fa7d9dd3a486c5e1f916b1b
+size 136052887
runs/Nov03_14-39-23_matt-TRX40-AORUS-PRO-WIFI/1667486370.29531/events.out.tfevents.1667486370.matt-TRX40-AORUS-PRO-WIFI.44140.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4473846db48d0506346b5987023155dc4cd63b1657be31b0c97a15da88afbead
+size 5566
runs/Nov03_14-39-23_matt-TRX40-AORUS-PRO-WIFI/events.out.tfevents.1667486370.matt-TRX40-AORUS-PRO-WIFI.44140.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4e8b2fae2127e645e4488168691c78245fd8d9706b41b9212e907bf9d483c5cb
+size 4255
runs/Sep30_13-50-03_matt-TRX40-AORUS-PRO-WIFI/events.out.tfevents.1664542214.matt-TRX40-AORUS-PRO-WIFI.28778.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:80d9936fafd44449fa743ecd32019950cb5b887760967cfa83c23cc220badc4b
-size 5144
+oid sha256:b934941d5c9c3d121f82642937e86121257e812e1f8ed4815fb6bd13b9dcba64
+size 5498
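The files under runs/ are standard TensorBoard event logs written during training. A minimal sketch of reading the logged scalars offline with the `tensorboard` Python package (this tooling is an assumption, not part of the commit; the exact scalar tags depend on the run):

```python
# Minimal sketch, not part of this commit: read logged metrics from the
# TensorBoard event files under runs/ using the tensorboard package.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

logdir = "runs/Nov03_14-39-23_matt-TRX40-AORUS-PRO-WIFI"  # directory added in this commit
acc = EventAccumulator(logdir)
acc.Reload()  # parse the events.out.tfevents.* files

print(acc.Tags()["scalars"])  # available scalar tags, e.g. training loss
for event in acc.Scalars(acc.Tags()["scalars"][0]):
    print(event.step, event.value)
```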
tokenizer_config.json CHANGED
@@ -1,5 +1,6 @@
 {
-  "name_or_path": "Rocketknight1/esm2_t12_35M_UR50D",
-  "special_tokens_map_file": "/home/matt/.cache/huggingface/hub/models--Rocketknight1--esm2_t12_35M_UR50D/snapshots/dcbfb6f68520d5f0eacf3999b7ce707f546173f5/special_tokens_map.json",
+  "model_max_length": 1024,
+  "name_or_path": "facebook/esm2_t12_35M_UR50D",
+  "special_tokens_map_file": "/home/matt/.cache/huggingface/hub/models--facebook--esm2_t12_35M_UR50D/snapshots/dbb5b2b74bf5bd9cd0ab5c2b95ef3994f69879a3/special_tokens_map.json",
   "tokenizer_class": "EsmTokenizer"
 }
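The tokenizer config now records `model_max_length: 1024`, so the tokenizer can truncate long sequences to fit the model's position embeddings. A minimal sketch of how that limit applies when tokenizing a protein sequence (the path and sequence are placeholders):

```python
# Minimal sketch, not part of this commit: tokenize a protein sequence with the
# EsmTokenizer from this repo; "path/to/repo" is a placeholder local checkout.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/repo")
print(tokenizer.model_max_length)  # 1024, from tokenizer_config.json above

# A toy amino-acid sequence; truncation=True caps the encoding at model_max_length.
encoded = tokenizer("MKTAYIAKQRQISFVKSHFSRQLEERLGLIEVQ", truncation=True, return_tensors="pt")
print(encoded["input_ids"].shape)
```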
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0797aaaa0a3232cd019addaeda84ab4f7c71be252b0c6a9b099e753e20a8c5d2
-size 3451
+oid sha256:a83d4007c6e9d28f6b996518a23be038d8a649b6bae6e04753c81ab9045bfd1a
+size 3439
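training_args.bin is the pickled `TrainingArguments` object that the `Trainer` saves alongside each checkpoint. A minimal sketch of inspecting it, assuming PyTorch is installed (on recent PyTorch you must opt in to full unpickling):

```python
# Minimal sketch, not part of this commit: inspect the TrainingArguments stored in
# training_args.bin. weights_only=False is needed on newer PyTorch because the file
# is a pickled Python object rather than a plain tensor state dict.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)  # TrainingArguments
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```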