{
  "architectures": [
    "KBERTForMaskedLM"
  ],
  "head_dropout": 0.0,
  "intermediate_dim": 998,
  "logit_softcap": 15,
  "model_dim": 768,
  "num_attention_heads": 6,
  "num_layers": 20,
  "tokenizer_uri": "answerdotai/ModernBERT-base",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.48.0"
}