{ "architectures": [ "CustomModelForCausalLM" ], "model_type": "custom_llm", "n_embd": 640, "n_head": 10, "n_layer": 12, "n_positions": 512, "torch_dtype": "float32", "transformers_version": "4.48.2", "vocab_size": 50000 }