IlyasMoutawwakil (HF staff) committed
Commit a2fae74 · verified · 1 Parent(s): 912a768

Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json with huggingface_hub
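For context, a report like this is typically pushed with the huggingface_hub client API, as the commit message says. A minimal sketch of such an upload call; the repo_id and repo_type below are hypothetical placeholders, not taken from this page:

```python
# Sketch of the kind of huggingface_hub call that produces a commit like this.
# repo_id and repo_type are hypothetical; path_in_repo matches the file header above.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="benchmark_report.json",  # local report from the benchmark run
    path_in_repo="cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json",
    repo_id="<namespace>/<benchmark-results-repo>",  # hypothetical placeholder
    repo_type="dataset",  # assumption: benchmark results are often stored in dataset repos
    commit_message="Upload cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json with huggingface_hub",
)
```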

cuda_inference_transformers_token-classification_microsoft/deberta-v3-base/benchmark_report.json ADDED
@@ -0,0 +1,107 @@
+ {
+     "forward": {
+         "memory": {
+             "unit": "MB",
+             "max_ram": 1026.53952,
+             "max_global_vram": 1122.848768,
+             "max_process_vram": 307182.682112,
+             "max_reserved": 773.849088,
+             "max_allocated": 745.087488
+         },
+         "latency": {
+             "unit": "s",
+             "count": 76,
+             "total": 0.9987027988433838,
+             "mean": 0.01314082630057084,
+             "stdev": 0.0012102212567672629,
+             "p50": 0.012344738006591797,
+             "p90": 0.014669605731964111,
+             "p95": 0.014893885135650636,
+             "p99": 0.016816754341125485,
+             "values": [
+                 0.01644983673095703,
+                 0.01438008689880371,
+                 0.014216888427734374,
+                 0.014499926567077636,
+                 0.013900569915771484,
+                 0.013988409996032715,
+                 0.01470136547088623,
+                 0.014587126731872558,
+                 0.014549047470092773,
+                 0.014414007186889649,
+                 0.014719924926757812,
+                 0.014637845993041992,
+                 0.014552886962890626,
+                 0.014553366661071777,
+                 0.01491560459136963,
+                 0.01480552577972412,
+                 0.014886645317077637,
+                 0.0160821590423584,
+                 0.017917507171630858,
+                 0.014252887725830079,
+                 0.0138789701461792,
+                 0.013882169723510742,
+                 0.013765851020812988,
+                 0.013286012649536133,
+                 0.01302601432800293,
+                 0.0125303373336792,
+                 0.012267778396606446,
+                 0.01222553825378418,
+                 0.012255778312683105,
+                 0.012324578285217285,
+                 0.012235939025878906,
+                 0.0122298583984375,
+                 0.012260578155517577,
+                 0.012239937782287597,
+                 0.012200899124145508,
+                 0.01228233814239502,
+                 0.012196099281311035,
+                 0.012244897842407227,
+                 0.01228841781616211,
+                 0.012182177543640137,
+                 0.012209057807922363,
+                 0.012272257804870606,
+                 0.012211138725280762,
+                 0.01218985939025879,
+                 0.01227737808227539,
+                 0.012199777603149414,
+                 0.012237217903137207,
+                 0.0122100191116333,
+                 0.012236418724060059,
+                 0.012259138107299805,
+                 0.012309057235717773,
+                 0.01221865940093994,
+                 0.012205537796020507,
+                 0.01224457836151123,
+                 0.01221257781982422,
+                 0.012248417854309083,
+                 0.012364897727966308,
+                 0.01223481845855713,
+                 0.012457056999206543,
+                 0.013566011428833008,
+                 0.012853215217590332,
+                 0.014004570007324219,
+                 0.013667290687561034,
+                 0.013029375076293946,
+                 0.013071773529052734,
+                 0.012851295471191406,
+                 0.01291993522644043,
+                 0.013096413612365722,
+                 0.012450016975402833,
+                 0.012221537590026855,
+                 0.012205697059631348,
+                 0.012214179039001466,
+                 0.01218073844909668,
+                 0.012317697525024414,
+                 0.012214978218078614,
+                 0.012224417686462402
+             ]
+         },
+         "throughput": {
+             "unit": "samples/s",
+             "value": 76.09871534155808
+         },
+         "energy": null,
+         "efficiency": null
+     }
+ }