IlyasMoutawwakil (HF staff) committed
Commit 8e1bba1 · verified · 1 Parent(s): e96f83c

Upload cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json with huggingface_hub

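Since the file was pushed with huggingface_hub, it can be pulled back the same way. A minimal sketch, assuming these benchmark files live in a dataset repo; the repo_id below is a placeholder and is not part of this commit:

    import json

    from huggingface_hub import hf_hub_download

    # Placeholder repo id (assumption): replace with the actual benchmark dataset repo.
    path = hf_hub_download(
        repo_id="<namespace>/<benchmark-dataset>",
        repo_type="dataset",  # assumption: benchmark results are stored in a dataset repo
        filename="cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json",
        revision="8e1bba1",   # the commit above
    )

    with open(path) as f:
        report = json.load(f)

    # The diff below touches the "overall", "warmup" and "train" sections;
    # their exact nesting inside the JSON is not shown here, so inspect the keys first.
    print(list(report.keys()))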
cuda_training_transformers_multiple-choice_FacebookAI/roberta-base/benchmark.json CHANGED

@@ -107,7 +107,7 @@
     "overall": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1106.919424,
+            "max_ram": 1107.959808,
             "max_global_vram": 3376.939008,
             "max_process_vram": 0.0,
             "max_reserved": 2730.491904,
@@ -116,24 +116,24 @@
         "latency": {
             "unit": "s",
             "count": 5,
-            "total": 1.0190295333862305,
-            "mean": 0.2038059066772461,
-            "stdev": 0.2574071663262077,
-            "p50": 0.0752363510131836,
-            "p90": 0.46158481140136726,
-            "p95": 0.5901017318725584,
-            "p99": 0.6929152682495117,
+            "total": 0.9226157989501953,
+            "mean": 0.18452315979003905,
+            "stdev": 0.2181892195993897,
+            "p50": 0.07546060943603515,
+            "p90": 0.4028712921142579,
+            "p95": 0.5118863296508788,
+            "p99": 0.5990983596801758,
             "values": [
-                0.71861865234375,
-                0.07603404998779296,
-                0.0752363510131836,
-                0.07511347198486328,
-                0.07402700805664063
+                0.6209013671875,
+                0.07582617950439453,
+                0.07546060943603515,
+                0.07518617248535156,
+                0.07524147033691406
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 49.06629137023166
+            "value": 54.1937392107233
         },
         "energy": null,
         "efficiency": null
@@ -141,7 +141,7 @@
     "warmup": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1106.919424,
+            "max_ram": 1107.959808,
             "max_global_vram": 3376.939008,
             "max_process_vram": 0.0,
             "max_reserved": 2730.491904,
@@ -150,21 +150,21 @@
         "latency": {
             "unit": "s",
             "count": 2,
-            "total": 0.794652702331543,
-            "mean": 0.3973263511657715,
-            "stdev": 0.3212923011779785,
-            "p50": 0.3973263511657715,
-            "p90": 0.6543601921081543,
-            "p95": 0.6864894222259521,
-            "p99": 0.7121928063201904,
+            "total": 0.6967275466918945,
+            "mean": 0.34836377334594726,
+            "stdev": 0.27253759384155274,
+            "p50": 0.34836377334594726,
+            "p90": 0.5663938484191895,
+            "p95": 0.5936476078033447,
+            "p99": 0.6154506153106689,
             "values": [
-                0.71861865234375,
-                0.07603404998779296
+                0.6209013671875,
+                0.07582617950439453
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 10.067291002129204
+            "value": 11.482250182276408
         },
         "energy": null,
         "efficiency": null
@@ -172,7 +172,7 @@
     "train": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1106.919424,
+            "max_ram": 1107.959808,
             "max_global_vram": 3376.939008,
             "max_process_vram": 0.0,
             "max_reserved": 2730.491904,
@@ -181,22 +181,22 @@
         "latency": {
             "unit": "s",
             "count": 3,
-            "total": 0.22437683105468753,
-            "mean": 0.07479227701822917,
-            "stdev": 0.0005434471771805967,
-            "p50": 0.07511347198486328,
-            "p90": 0.07521177520751954,
-            "p95": 0.07522406311035157,
-            "p99": 0.07523389343261719,
+            "total": 0.22588825225830075,
+            "mean": 0.07529608408610025,
+            "stdev": 0.0001185071198290258,
+            "p50": 0.07524147033691406,
+            "p90": 0.07541678161621093,
+            "p95": 0.07543869552612303,
+            "p99": 0.07545622665405273,
             "values": [
-                0.0752363510131836,
-                0.07511347198486328,
-                0.07402700805664063
+                0.07546060943603515,
+                0.07518617248535156,
+                0.07524147033691406
             ]
         },
         "throughput": {
             "unit": "samples/s",
-            "value": 80.22218655727805
+            "value": 79.68541887436092
         },
         "energy": null,
         "efficiency": null