{
  "attn_implementation": "flash_attention_2",
  "bos_token_id": 151643,
  "eos_token_id": 151643,
  "max_new_tokens": 2048,
  "transformers_version": "4.46.3"
}