{
  "_name_or_path": "microsoft/Phi-3-mini-128k-instruct",
  "architectures": [
    "Phi3ForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "configuration_phi3.Phi3Config",
    "AutoModel": "modeling_phi3.Phi3ForCausalLM",
    "AutoModelForCausalLM": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM"
  },
  "bos_token_id": 1,
  "embd_pdrop": 0.0,
  "eos_token_id": 32000,
  "hidden_act": "silu",
  "hidden_size": 3072,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "model_type": "phi3",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "original_max_position_embeddings": 4096,
  "pad_token_id": 32000,
  "resid_pdrop": 0.0,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "long_factor": [
      1.0700000524520874,
      1.1200000047683716,
      1.149999976158142,
      1.4199999570846558,
      1.5699999332427979,
      1.7999999523162842,
      2.129999876022339,
      2.129999876022339,
      3.009999990463257,
      5.910000324249268,
      6.950000286102295,
      9.070000648498535,
      9.930000305175781,
      10.710000038146973,
      11.130000114440918,
      14.609999656677246,
      15.409998893737793,
      19.809999465942383,
      37.279998779296875,
      38.279998779296875,
      38.599998474121094,
      40.12000274658203,
      46.20000457763672,
      50.940006256103516,
      53.66000747680664,
      54.9373893737793,
      56.89738845825195,
      57.28738784790039,
      59.98738479614258,
      60.86738586425781,
      60.887386322021484,
      61.71739196777344,
      62.91739273071289,
      62.957393646240234,
      63.41739273071289,
      63.8173942565918,
      63.83739471435547,
      63.897396087646484,
      63.93739700317383,
      64.06739807128906,
      64.11434936523438,
      64.12435150146484,
      64.15435028076172,
      64.19435119628906,
      64.24435424804688,
      64.57435607910156,
      64.69000244140625,
      64.76000213623047
    ],
    "short_factor": [
      1.1,
      1.1,
      1.1,
      1.3000000000000003,
      1.3500000000000003,
      1.3500000000000003,
      1.4000000000000004,
      1.5500000000000005,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.0500000000000007,
      2.0500000000000007,
      2.0500000000000007,
      2.0500000000000007,
      2.0500000000000007,
      2.0500000000000007,
      2.1000000000000005,
      2.1000000000000005,
      2.1500000000000004,
      2.25,
      2.25,
      2.25,
      2.25,
      2.25,
      2.3999999999999995,
      2.4499999999999993,
      2.499999999999999,
      2.6999999999999984,
      2.6999999999999984,
      2.7499999999999982,
      2.799999999999998,
      2.8999999999999977,
      3.049999999999997
    ],
    "type": "longrope"
  },
  "rope_theta": 10000.0,
  "sliding_window": 262144,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.43.3",
  "use_cache": true,
  "vocab_size": 32064
}
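
For reference, a minimal loading sketch for a checkpoint that ships this config. It is not part of this repository; the model id comes from "_name_or_path", the dtype from "torch_dtype", and trust_remote_code follows from the custom "auto_map" entries above.

```python
# Minimal sketch (assumptions noted inline), using the transformers library.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "microsoft/Phi-3-mini-128k-instruct"  # "_name_or_path" in the config

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16"
    trust_remote_code=True,      # "auto_map" points at custom config/modeling code
)
```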
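
The "rope_scaling" block is the LongRoPE extension that stretches the rotary embeddings from the original 4096-token window ("original_max_position_embeddings") to 131072 positions. Below is a sketch of how the 48 per-dimension factors are typically applied, assuming the behavior mirrors the "longrope" implementation in transformers; the variable names are illustrative, not from this repo.

```python
# Sketch of LongRoPE frequency scaling (assumption: mirrors transformers' handling).
import math
import torch

hidden_size, num_heads = 3072, 32
head_dim = hidden_size // num_heads  # 96 -> 48 rotary frequency pairs, one per factor
base = 10000.0                       # "rope_theta"
original_max = 4096                  # "original_max_position_embeddings"
max_pos = 131072                     # "max_position_embeddings"

def scaled_inv_freq(factors):
    # Each of the 48 factors divides one RoPE inverse frequency, stretching
    # that dimension's rotation period.
    ext = torch.tensor(factors, dtype=torch.float32)
    exponents = torch.arange(0, head_dim, 2, dtype=torch.float32) / head_dim
    return 1.0 / (ext * base ** exponents)

# "short_factor" applies to sequences up to 4096 tokens, "long_factor" beyond
# that; attention is additionally rescaled by a constant of this form:
attention_factor = math.sqrt(
    1 + math.log(max_pos / original_max) / math.log(original_max)
)
```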