{
    "name": "cuda_inference_transformers_text-classification_FacebookAI/roberta-base",
    "backend": {
        "name": "pytorch",
        "version": "2.4.1+rocm6.1",
        "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
        "task": "text-classification",
        "library": "transformers",
        "model_type": "roberta",
        "model": "FacebookAI/roberta-base",
        "processor": "FacebookAI/roberta-base",
        "device": "cuda",
        "device_ids": "4",
        "seed": 42,
        "inter_op_num_threads": null,
        "intra_op_num_threads": null,
        "model_kwargs": {},
        "processor_kwargs": {},
        "no_weights": true,
        "device_map": null,
        "torch_dtype": null,
        "eval_mode": true,
        "to_bettertransformer": false,
        "low_cpu_mem_usage": null,
        "attn_implementation": null,
        "cache_implementation": null,
        "autocast_enabled": false,
        "autocast_dtype": null,
        "torch_compile": false,
        "torch_compile_target": "forward",
        "torch_compile_config": {},
        "quantization_scheme": null,
        "quantization_config": {},
        "deepspeed_inference": false,
        "deepspeed_inference_config": {},
        "peft_type": null,
        "peft_config": {}
    },
    "scenario": {
        "name": "inference",
        "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
        "iterations": 1,
        "duration": 1,
        "warmup_runs": 1,
        "input_shapes": {
            "batch_size": 1,
            "num_choices": 2,
            "sequence_length": 2
        },
        "new_tokens": null,
        "memory": true,
        "latency": true,
        "energy": false,
        "forward_kwargs": {},
        "generate_kwargs": {
            "max_new_tokens": 2,
            "min_new_tokens": 2
        },
        "call_kwargs": {
            "num_inference_steps": 2
        }
    },
    "launcher": {
        "name": "process",
        "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
        "device_isolation": true,
        "device_isolation_action": "error",
        "numactl": false,
        "numactl_kwargs": {},
        "start_method": "spawn"
    },
    "environment": {
        "cpu": " AMD EPYC 7763 64-Core Processor",
        "cpu_count": 128,
        "cpu_ram_mb": 1082015.256576,
        "system": "Linux",
        "machine": "x86_64",
        "platform": "Linux-5.15.0-101-generic-x86_64-with-glibc2.35",
        "processor": "x86_64",
        "python_version": "3.10.12",
        "gpu": [
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]"
        ],
        "gpu_count": 8,
        "gpu_vram_mb": 549621596160,
        "optimum_benchmark_version": "0.4.0",
        "optimum_benchmark_commit": null,
        "transformers_version": "4.44.2",
        "transformers_commit": null,
        "accelerate_version": "0.34.0",
        "accelerate_commit": null,
        "diffusers_version": "0.30.2",
        "diffusers_commit": null,
        "optimum_version": null,
        "optimum_commit": null,
        "timm_version": "1.0.9",
        "timm_commit": null,
        "peft_version": null,
        "peft_commit": null
    }
}