Sarmila committed on
Commit
e3d27fb
1 Parent(s): bdb6273

Training in progress, step 500

Browse files
config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "_name_or_path": "microsoft/layoutlmv2-base-uncased",
3
  "architectures": [
4
- "LayoutLMv2ForTokenClassification"
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "convert_sync_batchnorm": true,
@@ -107,12 +107,14 @@
107
  "hidden_size": 768,
108
  "id2label": {
109
  "0": "O",
110
- "1": "B-HEADER",
111
- "2": "I-HEADER",
112
- "3": "B-QUESTION",
113
- "4": "I-QUESTION",
114
- "5": "B-ANSWER",
115
- "6": "I-ANSWER"
 
 
116
  },
117
  "image_feature_pool_shape": [
118
  7,
@@ -122,12 +124,14 @@
122
  "initializer_range": 0.02,
123
  "intermediate_size": 3072,
124
  "label2id": {
125
- "B-ANSWER": 5,
126
- "B-HEADER": 1,
127
- "B-QUESTION": 3,
128
- "I-ANSWER": 6,
129
- "I-HEADER": 2,
130
- "I-QUESTION": 4,
 
 
131
  "O": 0
132
  },
133
  "layer_norm_eps": 1e-12,
 
1
  {
2
  "_name_or_path": "microsoft/layoutlmv2-base-uncased",
3
  "architectures": [
4
+ "CustomModel"
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "convert_sync_batchnorm": true,
 
107
  "hidden_size": 768,
108
  "id2label": {
109
  "0": "O",
110
+ "1": "B-COMPANY",
111
+ "2": "I-COMPANY",
112
+ "3": "B-DATE",
113
+ "4": "I-DATE",
114
+ "5": "B-ADDRESS",
115
+ "6": "I-ADDRESS",
116
+ "7": "B-TOTAL",
117
+ "8": "I-TOTAL"
118
  },
119
  "image_feature_pool_shape": [
120
  7,
 
124
  "initializer_range": 0.02,
125
  "intermediate_size": 3072,
126
  "label2id": {
127
+ "B-ADDRESS": 5,
128
+ "B-COMPANY": 1,
129
+ "B-DATE": 3,
130
+ "B-TOTAL": 7,
131
+ "I-ADDRESS": 6,
132
+ "I-COMPANY": 2,
133
+ "I-DATE": 4,
134
+ "I-TOTAL": 8,
135
  "O": 0
136
  },
137
  "layer_norm_eps": 1e-12,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:9478d366f575aa5023133f07556e35260f03f9541b7c33a14efec3d12d4642f5
3
- size 802226155
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d4330bc67a3708fa0292ed1d5425d934c1e9793f492a4525ae39295c82c7178a
3
+ size 802232299
runs/Apr19_09-13-39_personal-pod-gpu-m4zzzgj/events.out.tfevents.1650359625.personal-pod-gpu-m4zzzgj.16908.2 CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:943480406898cd606ed9a96f6e25f8f81d62ae902d430d147cc5737a28349627
3
- size 6176
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f9e40122c34180e650762f3ce62110aee131255eae95aa6e431da6940fe1f164
3
+ size 6530
runs/Apr19_09-22-19_personal-pod-gpu-m4zzzgj/1650360148.6785583/events.out.tfevents.1650360148.personal-pod-gpu-m4zzzgj.17493.1 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ba966c27e3f82065c9bb3f62972bd00ae04059463a753a6ccc6331ba07f26fd5
3
+ size 5069
runs/Apr19_09-22-19_personal-pod-gpu-m4zzzgj/1650360203.410056/events.out.tfevents.1650360203.personal-pod-gpu-m4zzzgj.17493.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c880e7ba301caebf8b47f363103b6bac44da7241231499daf0faf2da1bad6ba4
3
+ size 5069
runs/Apr19_09-22-19_personal-pod-gpu-m4zzzgj/events.out.tfevents.1650360148.personal-pod-gpu-m4zzzgj.17493.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:95e8f1f003aa905d72226e2654dfa52e67154bc6aedf9b2be40c2acf113e3e9f
3
+ size 11977
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:6506db78ac512470ca408dbe3299c7bbd7f95aa4b7c59271dca564bd73d3c35f
3
  size 3247
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ecc62cd94958252c7991d1b6cf27788282670f686c359ce15848578f02b05956
3
  size 3247