{ "deepseek-ai/DeepSeek-R1-Distill-Llama-8B": [ { "batch_size": 1, "sequence_length": 4096, "num_cores": 2, "auto_cast_type": "bf16" }, { "batch_size": 4, "sequence_length": 4096, "num_cores": 2, "auto_cast_type": "bf16" }, { "batch_size": 8, "sequence_length": 4096, "num_cores": 2, "auto_cast_type": "bf16" }, { "batch_size": 4, "sequence_length": 4096, "num_cores": 8, "auto_cast_type": "bf16" }, { "batch_size": 8, "sequence_length": 4096, "num_cores": 8, "auto_cast_type": "bf16" }, { "batch_size": 16, "sequence_length": 4096, "num_cores": 8, "auto_cast_type": "bf16" }, { "batch_size": 32, "sequence_length": 4096, "num_cores": 8, "auto_cast_type": "bf16" } ], "princeton-nlp/Sheared-LLaMA-1.3B": [ { "batch_size": 1, "sequence_length": 4096, "num_cores": 2, "auto_cast_type": "fp16" }, { "batch_size": 4, "sequence_length": 4096, "num_cores": 2, "auto_cast_type": "fp16" } ], "lmsys/vicuna-7b-v1.5": [ { "batch_size": 1, "sequence_length": 4096, "num_cores": 2, "auto_cast_type": "fp16" }, { "batch_size": 4, "sequence_length": 4096, "num_cores": 2, "auto_cast_type": "fp16" } ] }