{
    "name": "cuda_inference_transformers_fill-mask_google-bert/bert-base-uncased",
    "backend": {
        "name": "pytorch",
        "version": "2.2.0.dev20231010+rocm5.7",
        "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
        "task": "fill-mask",
        "library": "transformers",
        "model_type": "bert",
        "model": "google-bert/bert-base-uncased",
        "processor": "google-bert/bert-base-uncased",
        "device": "cuda",
        "device_ids": "5",
        "seed": 42,
        "inter_op_num_threads": null,
        "intra_op_num_threads": null,
        "model_kwargs": {},
        "processor_kwargs": {},
        "no_weights": true,
        "device_map": null,
        "torch_dtype": null,
        "eval_mode": true,
        "to_bettertransformer": false,
        "low_cpu_mem_usage": null,
        "attn_implementation": null,
        "cache_implementation": null,
        "autocast_enabled": false,
        "autocast_dtype": null,
        "torch_compile": false,
        "torch_compile_target": "forward",
        "torch_compile_config": {},
        "quantization_scheme": null,
        "quantization_config": {},
        "deepspeed_inference": false,
        "deepspeed_inference_config": {},
        "peft_type": null,
        "peft_config": {}
    },
    "scenario": {
        "name": "inference",
        "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
        "iterations": 1,
        "duration": 1,
        "warmup_runs": 1,
        "input_shapes": {
            "batch_size": 1,
            "num_choices": 2,
            "sequence_length": 2
        },
        "new_tokens": null,
        "memory": true,
        "latency": true,
        "energy": false,
        "forward_kwargs": {},
        "generate_kwargs": {
            "max_new_tokens": 2,
            "min_new_tokens": 2
        },
        "call_kwargs": {
            "num_inference_steps": 2
        }
    },
    "launcher": {
        "name": "process",
        "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
        "device_isolation": true,
        "device_isolation_action": "warn",
        "numactl": false,
        "numactl_kwargs": {},
        "start_method": "spawn"
    },
    "environment": {
        "cpu": " AMD EPYC 7763 64-Core Processor",
        "cpu_count": 128,
        "cpu_ram_mb": 1082015.256576,
        "system": "Linux",
        "machine": "x86_64",
        "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
        "processor": "x86_64",
        "python_version": "3.10.12",
        "gpu": [
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]"
        ],
        "gpu_count": 8,
        "gpu_vram_mb": 549621596160,
        "optimum_benchmark_version": "0.4.0",
        "optimum_benchmark_commit": null,
        "transformers_version": "4.44.2",
        "transformers_commit": null,
        "accelerate_version": "0.34.2",
        "accelerate_commit": null,
        "diffusers_version": "0.30.3",
        "diffusers_commit": null,
        "optimum_version": null,
        "optimum_commit": null,
        "timm_version": "1.0.9",
        "timm_commit": null,
        "peft_version": "0.12.0",
        "peft_commit": null
    }
}