{
    "name": "cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
    "backend": {
        "name": "pytorch",
        "version": "2.3.1+rocm5.7",
        "_target_": "optimum_benchmark.backends.pytorch.backend.PyTorchBackend",
        "task": "image-classification",
        "library": "transformers",
        "model_type": "vit",
        "model": "google/vit-base-patch16-224",
        "processor": "google/vit-base-patch16-224",
        "device": "cuda",
        "device_ids": "5",
        "seed": 42,
        "inter_op_num_threads": null,
        "intra_op_num_threads": null,
        "model_kwargs": {},
        "processor_kwargs": {},
        "no_weights": true,
        "device_map": null,
        "torch_dtype": null,
        "eval_mode": true,
        "to_bettertransformer": false,
        "low_cpu_mem_usage": null,
        "attn_implementation": null,
        "cache_implementation": null,
        "autocast_enabled": false,
        "autocast_dtype": null,
        "torch_compile": false,
        "torch_compile_target": "forward",
        "torch_compile_config": {},
        "quantization_scheme": null,
        "quantization_config": {},
        "deepspeed_inference": false,
        "deepspeed_inference_config": {},
        "peft_type": null,
        "peft_config": {}
    },
    "scenario": {
        "name": "inference",
        "_target_": "optimum_benchmark.scenarios.inference.scenario.InferenceScenario",
        "iterations": 1,
        "duration": 1,
        "warmup_runs": 1,
        "input_shapes": {
            "batch_size": 2,
            "sequence_length": 16,
            "num_choices": 2
        },
        "new_tokens": null,
        "memory": true,
        "latency": true,
        "energy": false,
        "forward_kwargs": {},
        "generate_kwargs": {
            "max_new_tokens": 2,
            "min_new_tokens": 2
        },
        "call_kwargs": {
            "num_inference_steps": 2
        }
    },
    "launcher": {
        "name": "process",
        "_target_": "optimum_benchmark.launchers.process.launcher.ProcessLauncher",
        "device_isolation": true,
        "device_isolation_action": "warn",
        "numactl": false,
        "numactl_kwargs": {},
        "start_method": "spawn"
    },
    "environment": {
        "cpu": " AMD EPYC 7763 64-Core Processor",
        "cpu_count": 128,
        "cpu_ram_mb": 1082014.490624,
        "system": "Linux",
        "machine": "x86_64",
        "platform": "Linux-5.15.0-122-generic-x86_64-with-glibc2.35",
        "processor": "x86_64",
        "python_version": "3.10.12",
        "gpu": [
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]",
            "Advanced Micro Devices, Inc. [AMD/ATI]"
        ],
        "gpu_count": 8,
        "gpu_vram_mb": 549621596160,
        "optimum_benchmark_version": "0.5.0.dev0",
        "optimum_benchmark_commit": null,
        "transformers_version": "4.47.0",
        "transformers_commit": null,
        "accelerate_version": "1.2.0",
        "accelerate_commit": null,
        "diffusers_version": "0.31.0",
        "diffusers_commit": null,
        "optimum_version": null,
        "optimum_commit": null,
        "timm_version": "1.0.12",
        "timm_commit": null,
        "peft_version": "0.14.0",
        "peft_commit": null
    },
    "print_report": true,
    "log_report": true
}
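
The same configuration can be reproduced programmatically instead of through Hydra/CLI overrides. The snippet below is a minimal sketch only: the class names (Benchmark, BenchmarkConfig, PyTorchConfig, InferenceConfig, ProcessConfig) and the Benchmark.launch call are assumed from the optimum-benchmark Python API (version 0.5.0.dev0 recorded in the environment section), while the model, device, and scenario values are copied from the JSON above; fields left unset keep their library defaults.

# Sketch: rebuilding the benchmark config above with the optimum-benchmark Python API.
# Assumption: these classes are importable from the package top level, as in the
# library's README examples; values mirror the JSON config in this file.
from optimum_benchmark import (
    Benchmark,
    BenchmarkConfig,
    InferenceConfig,
    ProcessConfig,
    PyTorchConfig,
)

if __name__ == "__main__":
    # Backend: PyTorch on a single ROCm/CUDA device, random ("no_weights") weights.
    backend_config = PyTorchConfig(
        model="google/vit-base-patch16-224",
        device="cuda",
        device_ids="5",
        no_weights=True,
    )

    # Scenario: inference with latency and memory tracking, same input shapes as above.
    scenario_config = InferenceConfig(
        memory=True,
        latency=True,
        input_shapes={"batch_size": 2, "sequence_length": 16, "num_choices": 2},
    )

    # Launcher: isolated spawned process, warn if other processes touch the device.
    launcher_config = ProcessConfig(
        device_isolation=True,
        device_isolation_action="warn",
    )

    benchmark_config = BenchmarkConfig(
        name="cuda_inference_transformers_image-classification_google/vit-base-patch16-224",
        backend=backend_config,
        scenario=scenario_config,
        launcher=launcher_config,
    )

    # Runs the benchmark and returns a report with the measured latency/memory.
    benchmark_report = Benchmark.launch(benchmark_config)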