IlyasMoutawwakil (HF staff) committed
Commit 1fc42fb · verified · 1 Parent(s): 3c957a7

Upload cuda_inference_diffusers_stable-diffusion_CompVis/stable-diffusion-v1-4/benchmark_report.json with huggingface_hub
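For reference, reports like this are typically pushed programmatically. A minimal sketch using huggingface_hub's HfApi.upload_file (the repo_id and repo_type below are placeholders and assumptions, not taken from this commit):

    from huggingface_hub import HfApi

    api = HfApi()  # assumes a valid HF token is configured (e.g. via `huggingface-cli login`)
    api.upload_file(
        path_or_fileobj="benchmark_report.json",
        path_in_repo="cuda_inference_diffusers_stable-diffusion_CompVis/stable-diffusion-v1-4/benchmark_report.json",
        repo_id="<namespace>/<benchmark-repo>",  # placeholder: the target repo is not shown in this commit
        repo_type="dataset",                     # assumption: benchmark dumps are commonly dataset repos
        commit_message="Upload benchmark_report.json with huggingface_hub",
    )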

cuda_inference_diffusers_stable-diffusion_CompVis/stable-diffusion-v1-4/benchmark_report.json CHANGED
@@ -2,41 +2,41 @@
     "call": {
         "memory": {
             "unit": "MB",
-            "max_ram": 1305.366528,
-            "max_global_vram": 8145.862656,
+            "max_ram": 1305.010176,
+            "max_global_vram": 8204.582912,
             "max_process_vram": 0.0,
-            "max_reserved": 7491.026944,
-            "max_allocated": 6528.9728
+            "max_reserved": 7549.7472,
+            "max_allocated": 6519.126016
         },
         "latency": {
             "unit": "s",
             "count": 2,
-            "total": 1.2740385742187499,
-            "mean": 0.6370192871093749,
-            "stdev": 0.0006988525390624889,
-            "p50": 0.6370192871093749,
-            "p90": 0.637578369140625,
-            "p95": 0.6376482543945312,
-            "p99": 0.6377041625976563,
+            "total": 1.2939044189453126,
+            "mean": 0.6469522094726563,
+            "stdev": 0.003930419921874984,
+            "p50": 0.6469522094726563,
+            "p90": 0.6500965454101563,
+            "p95": 0.6504895874023437,
+            "p99": 0.6508040209960937,
             "values": [
-                0.6377181396484375,
-                0.6363204345703125
+                0.6508826293945312,
+                0.6430217895507813
             ]
         },
         "throughput": {
            "unit": "images/s",
-            "value": 1.5698111819152847
+            "value": 1.5457092276029476
         },
         "energy": {
             "unit": "kWh",
-            "cpu": 7.5299517975913165e-06,
-            "ram": 4.11896769380751e-06,
-            "gpu": 4.208628366899909e-05,
-            "total": 5.3735203160397915e-05
+            "cpu": 7.638010962141885e-06,
+            "ram": 4.178061007119046e-06,
+            "gpu": 4.213933926700041e-05,
+            "total": 5.395541123626134e-05
         },
         "efficiency": {
             "unit": "images/kWh",
-            "value": 18609.774248271307
+            "value": 18533.822226304128
         }
     }
 }
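As a sanity check on the updated numbers, the derived fields follow from the two per-call latencies and the total energy. A minimal sketch (assuming a population standard deviation and linearly interpolated percentiles, which reproduce the reported values but are not spelled out in the file):

    import numpy as np

    # Per-call latencies (s) from the updated report
    values = np.array([0.6508826293945312, 0.6430217895507813])

    total = values.sum()                                  # ~1.293904 s  -> "total"
    mean = values.mean()                                  # ~0.646952 s  -> "mean" and "p50"
    stdev = values.std()                                  # population std, ~0.003930 -> "stdev"
    p90, p95, p99 = np.percentile(values, [90, 95, 99])   # linear interpolation -> "p90"/"p95"/"p99"

    throughput = 1.0 / mean                               # ~1.5457 images/s   -> "throughput.value"
    total_energy_kwh = 5.395541123626134e-05              #                     -> "energy.total"
    efficiency = 1.0 / total_energy_kwh                   # ~18533.8 images/kWh -> "efficiency.value"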