{
  "_name_or_path": "/artifacts/olmo_7b_toks_600b",
  "activation_type": "swiglu",
  "alibi": false,
  "alibi_bias_max": 8.0,
  "architectures": [
    "OLMoForCausalLM"
  ],
  "attention_dropout": 0.0,
  "attention_layer_norm": false,
  "attention_layer_norm_with_affine": false,
  "bias_for_layer_norm": false,
  "block_group_size": 1,
  "block_type": "sequential",
  "clip_qkv": null,
  "d_model": 4096,
  "embedding_dropout": 0.0,
  "embedding_size": 50304,
  "eos_token_id": 0,
  "include_bias": false,
  "init_cutoff_factor": null,
  "init_device": "meta",
  "init_fn": "mitchell",
  "init_std": 0.02,
  "layer_norm_eps": 1e-05,
  "layer_norm_type": "default",
  "layer_norm_with_affine": false,
  "max_sequence_length": 2048,
  "mlp_hidden_size": 22016,
  "mlp_ratio": 4,
  "model_type": "hf_olmo",
  "multi_query_attention": false,
  "n_heads": 32,
  "n_kv_heads": null,
  "n_layers": 32,
  "pad_token_id": 1,
  "precision": "amp_bf16",
  "residual_dropout": 0.0,
  "rope": true,
  "rope_full_precision": true,
  "scale_logits": false,
  "torch_dtype": "float32",
  "transformers_version": "4.44.0",
  "use_cache": true,
  "vocab_size": 50280,
  "weight_tying": false
}