IlyasMoutawwakil (HF staff) committed (verified)
Commit: 469d831
Parent(s): 31822c1

Upload cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

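The commit message states that the file was pushed with the huggingface_hub client. A minimal sketch of such an upload is shown below; the repo_id is a placeholder, since the target repository is not visible in this diff, and repo_type="dataset" is an assumption about where these benchmark dumps are stored.

# Illustrative sketch only: reproduces an upload like the one in this commit.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark.json",  # local result file written by optimum-benchmark
    path_in_repo="cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
    repo_id="<namespace>/<benchmark-repo>",  # placeholder: not shown in this diff
    repo_type="dataset",                     # assumption about the target repo type
    commit_message="Upload cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub",
)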
cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED

@@ -3,7 +3,7 @@
  "name": "cuda_training_transformers_multiple-choice_FacebookAI/roberta-base",
  "backend": {
  "name": "pytorch",
- "version": "2.3.1+rocm5.7",
+ "version": "2.4.1+rocm6.1",
  "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
  "task": "multiple-choice",
  "library": "transformers",
@@ -11,7 +11,7 @@
  "model": "FacebookAI/roberta-base",
  "processor": "FacebookAI/roberta-base",
  "device": "cuda",
- "device_ids": "5",
+ "device_ids": "4",
  "seed": 42,
  "inter_op_num_threads": null,
  "intra_op_num_threads": null,
@@ -117,33 +117,33 @@
  "overall": {
  "memory": {
  "unit": "MB",
- "max_ram": 1276.616704,
+ "max_ram": 1644.875776,
  "max_global_vram": 68702.69952,
- "max_process_vram": 291224.39168,
+ "max_process_vram": 0.0,
  "max_reserved": 2707.423232,
  "max_allocated": 2497.88416
  },
  "latency": {
  "unit": "s",
  "count": 5,
- "total": 0.7140777702331543,
- "mean": 0.14281555404663088,
- "stdev": 0.19955275525061025,
- "p50": 0.042892650604248045,
- "p90": 0.34260464935302737,
- "p95": 0.4422626213073729,
- "p99": 0.5219889988708496,
+ "total": 0.7170670967102051,
+ "mean": 0.14341341934204102,
+ "stdev": 0.19405611202544185,
+ "p50": 0.046274078369140624,
+ "p90": 0.3377336364746094,
+ "p95": 0.4346293304443358,
+ "p99": 0.5121458856201171,
  "values": [
- 0.5419205932617187,
- 0.043630733489990234,
- 0.04281521606445313,
- 0.042892650604248045,
- 0.04281857681274414
+ 0.5315250244140625,
+ 0.04704655456542969,
+ 0.04611983871459961,
+ 0.046101600646972656,
+ 0.046274078369140624
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 70.0203844515065
+ "value": 69.72848179674176
  },
  "energy": null,
  "efficiency": null
@@ -151,30 +151,30 @@
  "warmup": {
  "memory": {
  "unit": "MB",
- "max_ram": 1276.616704,
+ "max_ram": 1644.875776,
  "max_global_vram": 68702.69952,
- "max_process_vram": 291224.39168,
+ "max_process_vram": 0.0,
  "max_reserved": 2707.423232,
  "max_allocated": 2497.88416
  },
  "latency": {
  "unit": "s",
  "count": 2,
- "total": 0.585551326751709,
- "mean": 0.2927756633758545,
- "stdev": 0.24914492988586423,
- "p50": 0.2927756633758545,
- "p90": 0.4920916072845459,
- "p95": 0.5170061002731323,
- "p99": 0.5369376946640014,
+ "total": 0.5785715789794922,
+ "mean": 0.2892857894897461,
+ "stdev": 0.24223923492431637,
+ "p50": 0.28928578948974604,
+ "p90": 0.4830771774291992,
+ "p95": 0.5073011009216308,
+ "p99": 0.5266802397155761,
  "values": [
- 0.5419205932617187,
- 0.043630733489990234
+ 0.5315250244140625,
+ 0.04704655456542969
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 13.662337756758658
+ "value": 13.82715689925648
  },
  "energy": null,
  "efficiency": null
@@ -182,31 +182,31 @@
  "train": {
  "memory": {
  "unit": "MB",
- "max_ram": 1276.616704,
+ "max_ram": 1644.875776,
  "max_global_vram": 68702.69952,
- "max_process_vram": 291224.39168,
+ "max_process_vram": 0.0,
  "max_reserved": 2707.423232,
  "max_allocated": 2497.88416
  },
  "latency": {
  "unit": "s",
  "count": 3,
- "total": 0.12852644348144532,
- "mean": 0.04284214782714844,
- "stdev": 3.5737203102167795e-05,
- "p50": 0.04281857681274414,
- "p90": 0.04287783584594727,
- "p95": 0.04288524322509765,
- "p99": 0.042891169128417964,
+ "total": 0.1384955177307129,
+ "mean": 0.0461651725769043,
+ "stdev": 7.736713542005571e-05,
+ "p50": 0.04611983871459961,
+ "p90": 0.04624323043823242,
+ "p95": 0.046258654403686525,
+ "p99": 0.0462709935760498,
  "values": [
- 0.04281521606445313,
- 0.042892650604248045,
- 0.04281857681274414
+ 0.04611983871459961,
+ 0.046101600646972656,
+ 0.046274078369140624
  ]
  },
  "throughput": {
  "unit": "samples/s",
- "value": 140.04900090928422
+ "value": 129.9681050689217
  },
  "energy": null,
  "efficiency": null