Delta-Vector committed on
Commit 3450414 · verified · 1 Parent(s): 00a9216

Training in progress, step 375, checkpoint

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. checkpoint-375/added_tokens.json +28 -0
  2. checkpoint-375/config.json +28 -0
  3. checkpoint-375/generation_config.json +7 -0
  4. checkpoint-375/global_step375/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt +3 -0
  5. checkpoint-375/global_step375/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt +3 -0
  6. checkpoint-375/global_step375/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt +3 -0
  7. checkpoint-375/global_step375/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt +3 -0
  8. checkpoint-375/global_step375/bf16_zero_pp_rank_4_mp_rank_00_optim_states.pt +3 -0
  9. checkpoint-375/global_step375/bf16_zero_pp_rank_5_mp_rank_00_optim_states.pt +3 -0
  10. checkpoint-375/global_step375/bf16_zero_pp_rank_6_mp_rank_00_optim_states.pt +3 -0
  11. checkpoint-375/global_step375/bf16_zero_pp_rank_7_mp_rank_00_optim_states.pt +3 -0
  12. checkpoint-375/global_step375/zero_pp_rank_0_mp_rank_00_model_states.pt +3 -0
  13. checkpoint-375/global_step375/zero_pp_rank_1_mp_rank_00_model_states.pt +3 -0
  14. checkpoint-375/global_step375/zero_pp_rank_2_mp_rank_00_model_states.pt +3 -0
  15. checkpoint-375/global_step375/zero_pp_rank_3_mp_rank_00_model_states.pt +3 -0
  16. checkpoint-375/global_step375/zero_pp_rank_4_mp_rank_00_model_states.pt +3 -0
  17. checkpoint-375/global_step375/zero_pp_rank_5_mp_rank_00_model_states.pt +3 -0
  18. checkpoint-375/global_step375/zero_pp_rank_6_mp_rank_00_model_states.pt +3 -0
  19. checkpoint-375/global_step375/zero_pp_rank_7_mp_rank_00_model_states.pt +3 -0
  20. checkpoint-375/latest +1 -0
  21. checkpoint-375/merges.txt +0 -0
  22. checkpoint-375/model-00001-of-00014.safetensors +3 -0
  23. checkpoint-375/model-00002-of-00014.safetensors +3 -0
  24. checkpoint-375/model-00003-of-00014.safetensors +3 -0
  25. checkpoint-375/model-00004-of-00014.safetensors +3 -0
  26. checkpoint-375/model-00005-of-00014.safetensors +3 -0
  27. checkpoint-375/model-00006-of-00014.safetensors +3 -0
  28. checkpoint-375/model-00007-of-00014.safetensors +3 -0
  29. checkpoint-375/model-00008-of-00014.safetensors +3 -0
  30. checkpoint-375/model-00009-of-00014.safetensors +3 -0
  31. checkpoint-375/model-00010-of-00014.safetensors +3 -0
  32. checkpoint-375/model-00011-of-00014.safetensors +3 -0
  33. checkpoint-375/model-00012-of-00014.safetensors +3 -0
  34. checkpoint-375/model-00013-of-00014.safetensors +3 -0
  35. checkpoint-375/model-00014-of-00014.safetensors +3 -0
  36. checkpoint-375/model.safetensors.index.json +778 -0
  37. checkpoint-375/rng_state_0.pth +3 -0
  38. checkpoint-375/rng_state_1.pth +3 -0
  39. checkpoint-375/rng_state_2.pth +3 -0
  40. checkpoint-375/rng_state_3.pth +3 -0
  41. checkpoint-375/rng_state_4.pth +3 -0
  42. checkpoint-375/rng_state_5.pth +3 -0
  43. checkpoint-375/rng_state_6.pth +3 -0
  44. checkpoint-375/rng_state_7.pth +3 -0
  45. checkpoint-375/scheduler.pt +3 -0
  46. checkpoint-375/special_tokens_map.json +31 -0
  47. checkpoint-375/tokenizer.json +3 -0
  48. checkpoint-375/tokenizer_config.json +240 -0
  49. checkpoint-375/trainer_state.json +2658 -0
  50. checkpoint-375/training_args.bin +3 -0
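
Taken together, this is a standard DeepSpeed ZeRO checkpoint layout: per-rank optimizer and model-state partitions under global_step375/ (eight data-parallel ranks), consolidated bf16 weights sharded over 14 safetensors files with an index, RNG state per rank, and the tokenizer and trainer files. A minimal loading sketch, assuming the checkpoint directory has been downloaded locally and that transformers, torch, and accelerate are installed (none of this code is part of the commit):

    from transformers import AutoModelForCausalLM, AutoTokenizer
    import torch

    ckpt_dir = "checkpoint-375"  # the directory added by this commit, downloaded locally

    tokenizer = AutoTokenizer.from_pretrained(ckpt_dir)
    model = AutoModelForCausalLM.from_pretrained(
        ckpt_dir,
        torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in config.json
        device_map="auto",           # requires accelerate; spreads the ~65.5 GB of weights
    )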
checkpoint-375/added_tokens.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "</think>": 151668,
+ "</tool_call>": 151658,
+ "</tool_response>": 151666,
+ "<think>": 151667,
+ "<tool_call>": 151657,
+ "<tool_response>": 151665,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
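
The 28 entries above extend the base Qwen2 vocabulary with chat, tool-calling, and vision control tokens. A quick check that the tokenizer resolves them to the recorded IDs, assuming the checkpoint-375 tokenizer files are present locally (illustrative only):

    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("checkpoint-375")
    assert tok.convert_tokens_to_ids("<|im_end|>") == 151645  # value recorded in added_tokens.json
    assert tok.convert_tokens_to_ids("<think>") == 151667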
checkpoint-375/config.json ADDED
@@ -0,0 +1,28 @@
+ {
+ "_name_or_path": "NewEden/Hamanasu-32B-V1",
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 27648,
+ "max_position_embeddings": 131072,
+ "max_window_layers": 64,
+ "model_type": "qwen2",
+ "num_attention_heads": 40,
+ "num_hidden_layers": 64,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": 32768,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.49.0",
+ "use_cache": false,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
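
The config describes a 64-layer Qwen2 decoder with grouped-query attention: 40 query heads share 8 KV heads, i.e. 5 query heads per KV group, with head_dim = 5120 / 40 = 128. A small sketch reading those values back (assumes the same local checkpoint directory):

    from transformers import AutoConfig

    cfg = AutoConfig.from_pretrained("checkpoint-375")
    head_dim = cfg.hidden_size // cfg.num_attention_heads           # 5120 // 40 = 128
    kv_groups = cfg.num_attention_heads // cfg.num_key_value_heads  # 40 // 8 = 5 query heads per KV head
    print(head_dim, kv_groups)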
checkpoint-375/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": 151645,
+ "transformers_version": "4.49.0"
+ }
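
These generation defaults enable sampling and stop on token 151645 (<|im_end|> per added_tokens.json). Reusing the model and tokenizer from the loading sketch above, generate() picks them up automatically (illustrative, not part of the commit):

    inputs = tokenizer("Hello", return_tensors="pt").to(model.device)
    out = model.generate(**inputs, max_new_tokens=32)  # do_sample/eos read from generation_config.json
    print(tokenizer.decode(out[0], skip_special_tokens=True))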
checkpoint-375/global_step375/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6060f1df867de68f01cb04aab5316f90a3a346509f00a2b2f737dda4e7aa9018
+ size 24702840487
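
As with all large files in this commit, what git stores is not the ~24.7 GB tensor file itself but a three-line Git LFS pointer: spec version, the SHA-256 of the real object, and its byte size. A minimal pointer parser, assuming the file at this path is the pointer stub rather than the resolved object:

    def parse_lfs_pointer(path):
        # A git-lfs pointer is three "key value" lines: version, oid, size.
        fields = {}
        with open(path) as f:
            for line in f:
                key, _, value = line.strip().partition(" ")
                fields[key] = value
        fields["size"] = int(fields["size"])
        return fields

    ptr = parse_lfs_pointer(
        "checkpoint-375/global_step375/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt"
    )
    print(ptr["oid"], ptr["size"])  # sha256:6060f1df... 24702840487 (~24.7 GB)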
checkpoint-375/global_step375/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3da006f1544e9938bb0428feed5a3350d439a5f1abdd8ee27006f02ff577a912
+ size 24702840487
checkpoint-375/global_step375/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ce503b01d9c8a9ea23999482abe9f110be5d213abfb828af1d11e8d5129e613
+ size 24702840487
checkpoint-375/global_step375/bf16_zero_pp_rank_3_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8550db49f9867d3b04c2de2984c59f8e798e81f47fed2b34ea8687b8e729a1ea
+ size 24702840487
checkpoint-375/global_step375/bf16_zero_pp_rank_4_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9582d83c184a055386a279dbe2d196bdbd4dfbf410c04997ac0203e3b24ee8de
+ size 24702840487
checkpoint-375/global_step375/bf16_zero_pp_rank_5_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9a1f67ab6176964fc551d3fd9e4af57b5e42919edbedcca9e1f17342bcec77db
+ size 24702840487
checkpoint-375/global_step375/bf16_zero_pp_rank_6_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:39f229135f43a64794ac922cb514256a68ff747287c2275ceae6ff1c609f20a4
+ size 24702840487
checkpoint-375/global_step375/bf16_zero_pp_rank_7_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c5e6bc7d1a25cbd6f3d052f619016ec2aebc7c353549b2e9dbdc99e280a7453
+ size 24702840487
checkpoint-375/global_step375/zero_pp_rank_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80b35073fef1994d2597ecfce9ee2605599e0ae5220f5669aa48c12b31c934d3
+ size 381701
checkpoint-375/global_step375/zero_pp_rank_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b81de293bb1b5c13a2a11de3cb6647cfb06682e62d52600931b58cb4ea5ed35f
+ size 381701
checkpoint-375/global_step375/zero_pp_rank_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea248f8f8f34e7fb2ca21d53e298285858ab4d2c68449ba116db1ea88abf1f16
+ size 381701
checkpoint-375/global_step375/zero_pp_rank_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6fdcb5dc0587d718fa9b6b3b4a7dab3e46dfd2cce679ad80aa01296a978ce71
+ size 381701
checkpoint-375/global_step375/zero_pp_rank_4_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9764abf88567d1a52e5032f541cac884a14c50b392f31bc8dab53ad7248f6f3c
+ size 381701
checkpoint-375/global_step375/zero_pp_rank_5_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19ad73c60bae308f34139ada8e5df952341795a3b46d5a504bb2f25784a8ac59
+ size 381701
checkpoint-375/global_step375/zero_pp_rank_6_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4f0c1066cb00eaf12a83594233a96bffd349898adc04f190a1852122d3904f2
+ size 381701
checkpoint-375/global_step375/zero_pp_rank_7_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f9b35f7343f7ec2c7e8fe5d235480128eb0ff4d43523685d0277bef8d08aa674
+ size 381701
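
Note the asymmetry: each of the eight optimizer-state partitions is ~24.7 GB, while the per-rank model_states files are only ~382 KB of metadata, since the actual parameters live in the consolidated safetensors shards. A rough back-of-the-envelope check using the sizes recorded in the pointers (the ~3:1 ratio to the bf16 weights suggests roughly 6 bytes of optimizer state per 2-byte parameter):

    per_rank = 24_702_840_487     # bytes per bf16_zero_pp_rank_*_optim_states.pt (pointers above)
    total_optim = 8 * per_rank    # 197_622_723_896 bytes, ~197.6 GB of sharded optimizer state
    weights = 65_527_752_704      # "total_size" from model.safetensors.index.json below
    print(total_optim / weights)  # ~3.0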
checkpoint-375/latest ADDED
@@ -0,0 +1 @@
+ global_step375
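
latest is DeepSpeed's resume tag: it names the global_step375/ sub-directory that load_checkpoint should restore from. With the Hugging Face Trainer this happens implicitly; a one-line sketch, assuming a trainer has been constructed elsewhere with the same DeepSpeed config:

    # DeepSpeed reads "latest" inside checkpoint-375 to locate global_step375/.
    trainer.train(resume_from_checkpoint="checkpoint-375")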
checkpoint-375/merges.txt ADDED
The diff for this file is too large to render.
checkpoint-375/model-00001-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:941f91cefaf906dc9eedc534df199263d48554f5f6c4b85868dc5ab3f35c098d
+ size 4891730992
checkpoint-375/model-00002-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e10b1885e766324ef17cc8ce992d679aa4803dd7eb65e02eea45cb9e8de388c
+ size 4876059352
checkpoint-375/model-00003-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:25645cab4c024a18417b2d3c7f8564f474fa12af33d6a8a161611bdb20259475
+ size 4876059384
checkpoint-375/model-00004-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:11eeeab9fddc35b3b9941aa0b65e4989f77fac35439bdbfe0dedca0f32d236b0
+ size 4876059416
checkpoint-375/model-00005-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd8d2b3b4c464fe3e5072051cb3c9d735a6045a78f542d9485857709ed040377
+ size 4876059416
checkpoint-375/model-00006-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f12da516093f0a62f0cd4e6c1d55e03a78b425e247087a9be147a698dc35c6d1
+ size 4876059416
checkpoint-375/model-00007-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:190933eb8e008d86110c960966c38f817d59c62ec7fc931f235fa943b4b156e9
+ size 4876059416
checkpoint-375/model-00008-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:156016a6abecb914c86e550e8b08d3d779b3c3984a55400f0c10eb6f33258c8a
+ size 4876059416
checkpoint-375/model-00009-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be0fbb1a3dc589d545a3f55596a4de9127b0e4c7f91159595718126a314523d8
+ size 4876059416
checkpoint-375/model-00010-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e609ebc95c1089fb91ffc1d06a118502e9bb98b9bba309064c7b6c2490417237
+ size 4876059416
checkpoint-375/model-00011-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6bde0eb6409fad4109925bfd18c036fe492d0d8dce2ba22890bc8a6fa9dedc54
+ size 4876059416
checkpoint-375/model-00012-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d4d707e3e0fb4ce04335fd1926e2861e1cab39210098220606e79bc69bcadd9c
+ size 4876059416
checkpoint-375/model-00013-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9fb73c4938f60c43cfb402f20bba613d41c11124540ed6872ed4408eaf928626
+ size 4876059416
checkpoint-375/model-00014-of-00014.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fed6d33ff07eff4f29e971692a82edbbd697edeb9d2625a0dfb12f1433c16978
+ size 2123397800
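
The index that follows maps every tensor name to one of the 14 shards above, so loaders can open only the files they need. A sketch of resolving a single tensor through it, assuming safetensors is installed and the shards are downloaded:

    import json
    from safetensors import safe_open

    with open("checkpoint-375/model.safetensors.index.json") as f:
        index = json.load(f)

    name = "model.layers.0.mlp.down_proj.weight"
    shard = index["weight_map"][name]  # -> "model-00001-of-00014.safetensors"
    with safe_open(f"checkpoint-375/{shard}", framework="pt") as fh:
        tensor = fh.get_tensor(name)   # reads only this tensor from the shard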
checkpoint-375/model.safetensors.index.json ADDED
@@ -0,0 +1,778 @@
+ {
+ "metadata": {
+ "total_size": 65527752704
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00014-of-00014.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00014.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00014.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00014.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00003-of-00014.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+ "model.layers.10.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.10.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.10.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00003-of-00014.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+ "model.layers.11.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.11.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.11.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00003-of-00014.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
+ "model.layers.12.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.12.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.12.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.13.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.13.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.13.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.14.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.14.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.14.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.15.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.15.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.15.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.16.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.16.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.16.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00004-of-00014.safetensors",
+ "model.layers.17.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.17.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.17.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.18.self_attn.k_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.18.self_attn.q_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.18.self_attn.v_proj.bias": "model-00004-of-00014.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00004-of-00014.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.19.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.19.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.19.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00014.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00014.safetensors",
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.20.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.20.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.20.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.21.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.21.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.21.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00005-of-00014.safetensors",
+ "model.layers.22.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.22.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.22.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.23.self_attn.k_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.23.self_attn.q_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.23.self_attn.v_proj.bias": "model-00005-of-00014.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00005-of-00014.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.24.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.24.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.24.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.25.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.25.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.25.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.26.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.26.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.26.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00006-of-00014.safetensors",
+ "model.layers.27.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.27.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.27.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.28.self_attn.k_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.28.self_attn.q_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.28.self_attn.v_proj.bias": "model-00006-of-00014.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00006-of-00014.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.29.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.29.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.29.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00002-of-00014.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00014.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00014.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.30.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.30.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.30.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.31.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.31.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.31.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00007-of-00014.safetensors",
+ "model.layers.32.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.32.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.32.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.33.self_attn.k_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.33.self_attn.q_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.33.self_attn.v_proj.bias": "model-00007-of-00014.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00007-of-00014.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.34.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.34.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.34.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.35.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.35.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.35.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.36.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.36.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.36.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00008-of-00014.safetensors",
+ "model.layers.37.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.37.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.37.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.38.self_attn.k_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.38.self_attn.q_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.38.self_attn.v_proj.bias": "model-00008-of-00014.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00008-of-00014.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.39.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.39.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.39.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00002-of-00014.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+ "model.layers.4.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.4.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.4.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.40.input_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.40.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.40.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.40.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.40.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.40.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.40.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.40.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.40.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.40.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.40.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.40.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.41.input_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.41.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.41.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.41.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.41.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.41.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.41.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.41.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.41.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.41.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.41.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.41.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.42.input_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.42.mlp.down_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.42.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.42.mlp.up_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.42.post_attention_layernorm.weight": "model-00009-of-00014.safetensors",
+ "model.layers.42.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.42.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.42.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.42.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.42.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.42.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.42.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.43.input_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.43.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.43.mlp.gate_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.43.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.43.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.43.self_attn.k_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.43.self_attn.k_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.43.self_attn.o_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.43.self_attn.q_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.43.self_attn.q_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.43.self_attn.v_proj.bias": "model-00009-of-00014.safetensors",
+ "model.layers.43.self_attn.v_proj.weight": "model-00009-of-00014.safetensors",
+ "model.layers.44.input_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.44.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.44.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.44.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.44.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.44.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.44.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.44.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.44.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.44.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.44.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.44.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.45.input_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.45.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.45.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.45.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.45.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.45.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.45.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.45.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.45.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.45.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.45.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.45.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.46.input_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.46.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.46.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.46.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.46.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.46.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.46.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.46.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.46.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.46.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.46.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.46.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.47.input_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.47.mlp.down_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.47.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.47.mlp.up_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.47.post_attention_layernorm.weight": "model-00010-of-00014.safetensors",
+ "model.layers.47.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.47.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.47.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.47.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.47.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.47.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.47.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.48.input_layernorm.weight": "model-00011-of-00014.safetensors",
+ "model.layers.48.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+ "model.layers.48.mlp.gate_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.48.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+ "model.layers.48.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+ "model.layers.48.self_attn.k_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.48.self_attn.k_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.48.self_attn.o_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.48.self_attn.q_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.48.self_attn.q_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.48.self_attn.v_proj.bias": "model-00010-of-00014.safetensors",
+ "model.layers.48.self_attn.v_proj.weight": "model-00010-of-00014.safetensors",
+ "model.layers.49.input_layernorm.weight": "model-00011-of-00014.safetensors",
+ "model.layers.49.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
+ "model.layers.49.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
+ "model.layers.49.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
+ "model.layers.49.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
+ "model.layers.49.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
+ "model.layers.49.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
+ "model.layers.49.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
+ "model.layers.49.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
+ "model.layers.49.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
+ "model.layers.49.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
+ "model.layers.49.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00002-of-00014.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
+ "model.layers.5.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
+ "model.layers.5.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
557
+ "model.layers.5.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
558
+ "model.layers.5.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
559
+ "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
560
+ "model.layers.50.input_layernorm.weight": "model-00011-of-00014.safetensors",
561
+ "model.layers.50.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
562
+ "model.layers.50.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
563
+ "model.layers.50.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
564
+ "model.layers.50.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
565
+ "model.layers.50.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
566
+ "model.layers.50.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
567
+ "model.layers.50.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
568
+ "model.layers.50.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
569
+ "model.layers.50.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
570
+ "model.layers.50.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
571
+ "model.layers.50.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
572
+ "model.layers.51.input_layernorm.weight": "model-00011-of-00014.safetensors",
573
+ "model.layers.51.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
574
+ "model.layers.51.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
575
+ "model.layers.51.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
576
+ "model.layers.51.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
577
+ "model.layers.51.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
578
+ "model.layers.51.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
579
+ "model.layers.51.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
580
+ "model.layers.51.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
581
+ "model.layers.51.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
582
+ "model.layers.51.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
583
+ "model.layers.51.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
584
+ "model.layers.52.input_layernorm.weight": "model-00011-of-00014.safetensors",
585
+ "model.layers.52.mlp.down_proj.weight": "model-00011-of-00014.safetensors",
586
+ "model.layers.52.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
587
+ "model.layers.52.mlp.up_proj.weight": "model-00011-of-00014.safetensors",
588
+ "model.layers.52.post_attention_layernorm.weight": "model-00011-of-00014.safetensors",
589
+ "model.layers.52.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
590
+ "model.layers.52.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
591
+ "model.layers.52.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
592
+ "model.layers.52.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
593
+ "model.layers.52.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
594
+ "model.layers.52.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
595
+ "model.layers.52.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
596
+ "model.layers.53.input_layernorm.weight": "model-00012-of-00014.safetensors",
597
+ "model.layers.53.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
598
+ "model.layers.53.mlp.gate_proj.weight": "model-00011-of-00014.safetensors",
599
+ "model.layers.53.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
600
+ "model.layers.53.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
601
+ "model.layers.53.self_attn.k_proj.bias": "model-00011-of-00014.safetensors",
602
+ "model.layers.53.self_attn.k_proj.weight": "model-00011-of-00014.safetensors",
603
+ "model.layers.53.self_attn.o_proj.weight": "model-00011-of-00014.safetensors",
604
+ "model.layers.53.self_attn.q_proj.bias": "model-00011-of-00014.safetensors",
605
+ "model.layers.53.self_attn.q_proj.weight": "model-00011-of-00014.safetensors",
606
+ "model.layers.53.self_attn.v_proj.bias": "model-00011-of-00014.safetensors",
607
+ "model.layers.53.self_attn.v_proj.weight": "model-00011-of-00014.safetensors",
608
+ "model.layers.54.input_layernorm.weight": "model-00012-of-00014.safetensors",
609
+ "model.layers.54.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
610
+ "model.layers.54.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
611
+ "model.layers.54.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
612
+ "model.layers.54.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
613
+ "model.layers.54.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
614
+ "model.layers.54.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
615
+ "model.layers.54.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
616
+ "model.layers.54.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
617
+ "model.layers.54.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
618
+ "model.layers.54.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
619
+ "model.layers.54.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
620
+ "model.layers.55.input_layernorm.weight": "model-00012-of-00014.safetensors",
621
+ "model.layers.55.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
622
+ "model.layers.55.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
623
+ "model.layers.55.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
624
+ "model.layers.55.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
625
+ "model.layers.55.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
626
+ "model.layers.55.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
627
+ "model.layers.55.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
628
+ "model.layers.55.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
629
+ "model.layers.55.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
630
+ "model.layers.55.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
631
+ "model.layers.55.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
632
+ "model.layers.56.input_layernorm.weight": "model-00012-of-00014.safetensors",
633
+ "model.layers.56.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
634
+ "model.layers.56.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
635
+ "model.layers.56.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
636
+ "model.layers.56.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
637
+ "model.layers.56.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
638
+ "model.layers.56.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
639
+ "model.layers.56.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
640
+ "model.layers.56.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
641
+ "model.layers.56.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
642
+ "model.layers.56.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
643
+ "model.layers.56.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
644
+ "model.layers.57.input_layernorm.weight": "model-00012-of-00014.safetensors",
645
+ "model.layers.57.mlp.down_proj.weight": "model-00012-of-00014.safetensors",
646
+ "model.layers.57.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
647
+ "model.layers.57.mlp.up_proj.weight": "model-00012-of-00014.safetensors",
648
+ "model.layers.57.post_attention_layernorm.weight": "model-00012-of-00014.safetensors",
649
+ "model.layers.57.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
650
+ "model.layers.57.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
651
+ "model.layers.57.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
652
+ "model.layers.57.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
653
+ "model.layers.57.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
654
+ "model.layers.57.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
655
+ "model.layers.57.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
656
+ "model.layers.58.input_layernorm.weight": "model-00013-of-00014.safetensors",
657
+ "model.layers.58.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
658
+ "model.layers.58.mlp.gate_proj.weight": "model-00012-of-00014.safetensors",
659
+ "model.layers.58.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
660
+ "model.layers.58.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
661
+ "model.layers.58.self_attn.k_proj.bias": "model-00012-of-00014.safetensors",
662
+ "model.layers.58.self_attn.k_proj.weight": "model-00012-of-00014.safetensors",
663
+ "model.layers.58.self_attn.o_proj.weight": "model-00012-of-00014.safetensors",
664
+ "model.layers.58.self_attn.q_proj.bias": "model-00012-of-00014.safetensors",
665
+ "model.layers.58.self_attn.q_proj.weight": "model-00012-of-00014.safetensors",
666
+ "model.layers.58.self_attn.v_proj.bias": "model-00012-of-00014.safetensors",
667
+ "model.layers.58.self_attn.v_proj.weight": "model-00012-of-00014.safetensors",
668
+ "model.layers.59.input_layernorm.weight": "model-00013-of-00014.safetensors",
669
+ "model.layers.59.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
670
+ "model.layers.59.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
671
+ "model.layers.59.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
672
+ "model.layers.59.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
673
+ "model.layers.59.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
674
+ "model.layers.59.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
675
+ "model.layers.59.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
676
+ "model.layers.59.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
677
+ "model.layers.59.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
678
+ "model.layers.59.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
679
+ "model.layers.59.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
680
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00014.safetensors",
681
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
682
+ "model.layers.6.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
683
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
684
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
685
+ "model.layers.6.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
686
+ "model.layers.6.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
687
+ "model.layers.6.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
688
+ "model.layers.6.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
689
+ "model.layers.6.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
690
+ "model.layers.6.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
691
+ "model.layers.6.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
692
+ "model.layers.60.input_layernorm.weight": "model-00013-of-00014.safetensors",
693
+ "model.layers.60.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
694
+ "model.layers.60.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
695
+ "model.layers.60.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
696
+ "model.layers.60.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
697
+ "model.layers.60.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
698
+ "model.layers.60.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
699
+ "model.layers.60.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
700
+ "model.layers.60.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
701
+ "model.layers.60.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
702
+ "model.layers.60.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
703
+ "model.layers.60.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
704
+ "model.layers.61.input_layernorm.weight": "model-00013-of-00014.safetensors",
705
+ "model.layers.61.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
706
+ "model.layers.61.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
707
+ "model.layers.61.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
708
+ "model.layers.61.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
709
+ "model.layers.61.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
710
+ "model.layers.61.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
711
+ "model.layers.61.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
712
+ "model.layers.61.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
713
+ "model.layers.61.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
714
+ "model.layers.61.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
715
+ "model.layers.61.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
716
+ "model.layers.62.input_layernorm.weight": "model-00013-of-00014.safetensors",
717
+ "model.layers.62.mlp.down_proj.weight": "model-00013-of-00014.safetensors",
718
+ "model.layers.62.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
719
+ "model.layers.62.mlp.up_proj.weight": "model-00013-of-00014.safetensors",
720
+ "model.layers.62.post_attention_layernorm.weight": "model-00013-of-00014.safetensors",
721
+ "model.layers.62.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
722
+ "model.layers.62.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
723
+ "model.layers.62.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
724
+ "model.layers.62.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
725
+ "model.layers.62.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
726
+ "model.layers.62.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
727
+ "model.layers.62.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
728
+ "model.layers.63.input_layernorm.weight": "model-00014-of-00014.safetensors",
729
+ "model.layers.63.mlp.down_proj.weight": "model-00014-of-00014.safetensors",
730
+ "model.layers.63.mlp.gate_proj.weight": "model-00013-of-00014.safetensors",
731
+ "model.layers.63.mlp.up_proj.weight": "model-00014-of-00014.safetensors",
732
+ "model.layers.63.post_attention_layernorm.weight": "model-00014-of-00014.safetensors",
733
+ "model.layers.63.self_attn.k_proj.bias": "model-00013-of-00014.safetensors",
734
+ "model.layers.63.self_attn.k_proj.weight": "model-00013-of-00014.safetensors",
735
+ "model.layers.63.self_attn.o_proj.weight": "model-00013-of-00014.safetensors",
736
+ "model.layers.63.self_attn.q_proj.bias": "model-00013-of-00014.safetensors",
737
+ "model.layers.63.self_attn.q_proj.weight": "model-00013-of-00014.safetensors",
738
+ "model.layers.63.self_attn.v_proj.bias": "model-00013-of-00014.safetensors",
739
+ "model.layers.63.self_attn.v_proj.weight": "model-00013-of-00014.safetensors",
740
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00014.safetensors",
741
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00014.safetensors",
742
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
743
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00014.safetensors",
744
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00014.safetensors",
745
+ "model.layers.7.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
746
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
747
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
748
+ "model.layers.7.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
749
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
750
+ "model.layers.7.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
751
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
752
+ "model.layers.8.input_layernorm.weight": "model-00003-of-00014.safetensors",
753
+ "model.layers.8.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
754
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00014.safetensors",
755
+ "model.layers.8.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
756
+ "model.layers.8.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
757
+ "model.layers.8.self_attn.k_proj.bias": "model-00002-of-00014.safetensors",
758
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00014.safetensors",
759
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00014.safetensors",
760
+ "model.layers.8.self_attn.q_proj.bias": "model-00002-of-00014.safetensors",
761
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00014.safetensors",
762
+ "model.layers.8.self_attn.v_proj.bias": "model-00002-of-00014.safetensors",
763
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00014.safetensors",
764
+ "model.layers.9.input_layernorm.weight": "model-00003-of-00014.safetensors",
765
+ "model.layers.9.mlp.down_proj.weight": "model-00003-of-00014.safetensors",
766
+ "model.layers.9.mlp.gate_proj.weight": "model-00003-of-00014.safetensors",
767
+ "model.layers.9.mlp.up_proj.weight": "model-00003-of-00014.safetensors",
768
+ "model.layers.9.post_attention_layernorm.weight": "model-00003-of-00014.safetensors",
769
+ "model.layers.9.self_attn.k_proj.bias": "model-00003-of-00014.safetensors",
770
+ "model.layers.9.self_attn.k_proj.weight": "model-00003-of-00014.safetensors",
771
+ "model.layers.9.self_attn.o_proj.weight": "model-00003-of-00014.safetensors",
772
+ "model.layers.9.self_attn.q_proj.bias": "model-00003-of-00014.safetensors",
773
+ "model.layers.9.self_attn.q_proj.weight": "model-00003-of-00014.safetensors",
774
+ "model.layers.9.self_attn.v_proj.bias": "model-00003-of-00014.safetensors",
775
+ "model.layers.9.self_attn.v_proj.weight": "model-00003-of-00014.safetensors",
776
+ "model.norm.weight": "model-00014-of-00014.safetensors"
777
+ }
778
+ }
checkpoint-375/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad8a35afd8967cbb748405387e44426e43ad127028e826eddc9b67d2ca873c85
+ size 15984
checkpoint-375/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f338ce80d7c441076bfc8c53b84067a0181f5a14e80c13d5acb8150b659f4d73
+ size 15984
checkpoint-375/rng_state_2.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9fbc9fa428939be10b46779f0eb5cd833e0da426b1cbdee77b3a55b6952235b
+ size 15984
checkpoint-375/rng_state_3.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ac55dba0b79d5fa4699d239da2f966d52040d576d31234ac8d4632e6956481bc
+ size 15984
checkpoint-375/rng_state_4.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:af2d0c015100768ffa23faf3b6c2d54ea89eb045603e30e55cd211e06ff34972
+ size 15984
checkpoint-375/rng_state_5.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c60a1b40608e34bc801c8231f97b81c53b5290dfaed1b9cd0ccbeca29574a991
+ size 15984
checkpoint-375/rng_state_6.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3ad6a142a403eb9aafc4a3a9a856bca648fe31fd22d796867baca31fb13656aa
+ size 15984
checkpoint-375/rng_state_7.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:38bc23a138cc800b22881742c0f3f9a71731a9a7111c6058a0077e6274d21773
+ size 15984
checkpoint-375/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ea4c80f97906b06bf11e09798ee50dca4af9fd4fe1df2d487c26eedfbc10005b
+ size 1064
checkpoint-375/special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+ }
checkpoint-375/tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aeb13307a71acd8fe81861d94ad54ab689df773318809eed3cbe794b4492dae4
+ size 11422654
checkpoint-375/tokenizer_config.json ADDED
@@ -0,0 +1,240 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151665": {
+ "content": "<tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151666": {
+ "content": "</tool_response>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151667": {
+ "content": "<think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151668": {
+ "content": "</think>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- '' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" and not message.tool_calls %}\n {%- set content = message.content.split('</think>')[-1].lstrip('\\n') %}\n {{- '<|im_start|>' + message.role + '\\n' + content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {%- set content = message.content.split('</think>')[-1].lstrip('\\n') %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n<think>\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "model_max_length": 131072,
+ "pad_token": "<|endoftext|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
checkpoint-375/trainer_state.json ADDED
@@ -0,0 +1,2658 @@
+ {
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.5,
+ "eval_steps": 500,
+ "global_step": 375,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.004,
+ "grad_norm": 2.7120276745865266,
+ "learning_rate": 1.2500000000000002e-07,
+ "loss": 0.9478,
+ "step": 1
+ },
+ {
+ "epoch": 0.008,
+ "grad_norm": 2.5535110233770237,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 0.962,
+ "step": 2
+ },
+ {
+ "epoch": 0.012,
+ "grad_norm": 2.6421564999361533,
+ "learning_rate": 3.75e-07,
+ "loss": 0.9483,
+ "step": 3
+ },
+ {
+ "epoch": 0.016,
+ "grad_norm": 2.5842713407482596,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 0.9484,
+ "step": 4
+ },
+ {
+ "epoch": 0.02,
+ "grad_norm": 2.5281267639976615,
+ "learning_rate": 6.25e-07,
+ "loss": 0.9512,
+ "step": 5
+ },
+ {
+ "epoch": 0.024,
+ "grad_norm": 2.55003561639048,
+ "learning_rate": 7.5e-07,
+ "loss": 0.9185,
+ "step": 6
+ },
+ {
+ "epoch": 0.028,
+ "grad_norm": 2.5014092783190947,
+ "learning_rate": 8.75e-07,
+ "loss": 0.9411,
+ "step": 7
+ },
+ {
+ "epoch": 0.032,
+ "grad_norm": 2.5247715994428046,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 0.9474,
+ "step": 8
+ },
+ {
+ "epoch": 0.036,
+ "grad_norm": 2.564090844776098,
+ "learning_rate": 1.125e-06,
+ "loss": 0.9246,
+ "step": 9
+ },
+ {
+ "epoch": 0.04,
+ "grad_norm": 2.5258707026382154,
+ "learning_rate": 1.25e-06,
+ "loss": 0.9078,
+ "step": 10
+ },
+ {
+ "epoch": 0.044,
+ "grad_norm": 2.5471156175078464,
+ "learning_rate": 1.3750000000000002e-06,
+ "loss": 0.9134,
+ "step": 11
+ },
+ {
+ "epoch": 0.048,
+ "grad_norm": 2.7125301372042467,
+ "learning_rate": 1.5e-06,
+ "loss": 0.8776,
+ "step": 12
+ },
+ {
+ "epoch": 0.052,
+ "grad_norm": 2.150392048325219,
+ "learning_rate": 1.6250000000000001e-06,
+ "loss": 0.8549,
+ "step": 13
+ },
+ {
+ "epoch": 0.056,
+ "grad_norm": 1.8184980673856375,
+ "learning_rate": 1.75e-06,
+ "loss": 0.8169,
+ "step": 14
+ },
+ {
+ "epoch": 0.06,
+ "grad_norm": 1.7529184087805771,
+ "learning_rate": 1.8750000000000003e-06,
+ "loss": 0.7975,
+ "step": 15
+ },
+ {
+ "epoch": 0.064,
+ "grad_norm": 1.7474840928555682,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 0.8204,
+ "step": 16
+ },
+ {
+ "epoch": 0.068,
+ "grad_norm": 1.522251241150409,
+ "learning_rate": 2.125e-06,
+ "loss": 0.8301,
+ "step": 17
+ },
+ {
+ "epoch": 0.072,
+ "grad_norm": 0.9782588646220091,
+ "learning_rate": 2.25e-06,
+ "loss": 0.7839,
+ "step": 18
+ },
+ {
+ "epoch": 0.076,
+ "grad_norm": 0.975162832649703,
+ "learning_rate": 2.375e-06,
+ "loss": 0.7876,
+ "step": 19
+ },
+ {
+ "epoch": 0.08,
+ "grad_norm": 0.8952616849532462,
+ "learning_rate": 2.5e-06,
+ "loss": 0.7589,
+ "step": 20
+ },
+ {
+ "epoch": 0.084,
+ "grad_norm": 0.8469187411478404,
+ "learning_rate": 2.6250000000000003e-06,
+ "loss": 0.7567,
+ "step": 21
+ },
+ {
+ "epoch": 0.088,
+ "grad_norm": 0.7683035597272257,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 0.7598,
+ "step": 22
+ },
+ {
+ "epoch": 0.092,
+ "grad_norm": 0.6779719620181055,
+ "learning_rate": 2.875e-06,
+ "loss": 0.7431,
+ "step": 23
+ },
+ {
+ "epoch": 0.096,
+ "grad_norm": 0.5998651036445499,
+ "learning_rate": 3e-06,
+ "loss": 0.7348,
+ "step": 24
+ },
+ {
+ "epoch": 0.1,
+ "grad_norm": 0.6798779251188986,
+ "learning_rate": 3.125e-06,
+ "loss": 0.7271,
+ "step": 25
+ },
+ {
+ "epoch": 0.104,
+ "grad_norm": 0.7459028436934305,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 0.7011,
+ "step": 26
+ },
+ {
+ "epoch": 0.108,
+ "grad_norm": 0.7661666147630528,
+ "learning_rate": 3.3750000000000003e-06,
+ "loss": 0.7209,
+ "step": 27
+ },
+ {
+ "epoch": 0.112,
+ "grad_norm": 0.7109730987249342,
+ "learning_rate": 3.5e-06,
+ "loss": 0.7118,
+ "step": 28
+ },
+ {
+ "epoch": 0.116,
+ "grad_norm": 0.6778217985942258,
+ "learning_rate": 3.625e-06,
+ "loss": 0.7295,
+ "step": 29
+ },
+ {
+ "epoch": 0.12,
+ "grad_norm": 0.6073429084137083,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 0.7114,
+ "step": 30
+ },
+ {
+ "epoch": 0.124,
+ "grad_norm": 0.5325413632521706,
+ "learning_rate": 3.875e-06,
+ "loss": 0.7117,
+ "step": 31
+ },
+ {
+ "epoch": 0.128,
+ "grad_norm": 0.5170445125401208,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.7132,
+ "step": 32
+ },
+ {
+ "epoch": 0.132,
+ "grad_norm": 0.47459002896590347,
+ "learning_rate": 4.125e-06,
+ "loss": 0.6913,
+ "step": 33
+ },
+ {
+ "epoch": 0.136,
+ "grad_norm": 0.46611890152884594,
+ "learning_rate": 4.25e-06,
+ "loss": 0.6893,
+ "step": 34
+ },
+ {
+ "epoch": 0.14,
+ "grad_norm": 0.5282002224942279,
+ "learning_rate": 4.3750000000000005e-06,
+ "loss": 0.7059,
+ "step": 35
+ },
+ {
+ "epoch": 0.144,
+ "grad_norm": 0.516098581069478,
+ "learning_rate": 4.5e-06,
+ "loss": 0.7068,
+ "step": 36
+ },
+ {
+ "epoch": 0.148,
+ "grad_norm": 0.4946613953020982,
+ "learning_rate": 4.625000000000001e-06,
+ "loss": 0.6854,
+ "step": 37
+ },
+ {
+ "epoch": 0.152,
+ "grad_norm": 0.45539519616935026,
+ "learning_rate": 4.75e-06,
+ "loss": 0.689,
+ "step": 38
+ },
+ {
+ "epoch": 0.156,
+ "grad_norm": 0.43098619176364883,
+ "learning_rate": 4.875e-06,
+ "loss": 0.6904,
+ "step": 39
+ },
+ {
+ "epoch": 0.16,
+ "grad_norm": 0.42693567901762375,
+ "learning_rate": 5e-06,
+ "loss": 0.6713,
+ "step": 40
+ },
+ {
+ "epoch": 0.164,
+ "grad_norm": 0.42916236211888303,
+ "learning_rate": 4.999941696797974e-06,
+ "loss": 0.6929,
+ "step": 41
+ },
+ {
+ "epoch": 0.168,
+ "grad_norm": 0.46320958381108696,
+ "learning_rate": 4.9997667899113055e-06,
+ "loss": 0.6601,
+ "step": 42
+ },
+ {
+ "epoch": 0.172,
+ "grad_norm": 0.4650598350234117,
+ "learning_rate": 4.9994752874981e-06,
+ "loss": 0.6627,
+ "step": 43
+ },
+ {
+ "epoch": 0.176,
+ "grad_norm": 0.4899967026584299,
+ "learning_rate": 4.999067203154777e-06,
+ "loss": 0.6583,
+ "step": 44
+ },
+ {
+ "epoch": 0.18,
+ "grad_norm": 0.4562804538758281,
+ "learning_rate": 4.998542555915435e-06,
+ "loss": 0.6712,
+ "step": 45
+ },
+ {
+ "epoch": 0.184,
+ "grad_norm": 0.36547047335003885,
+ "learning_rate": 4.997901370250966e-06,
+ "loss": 0.6495,
+ "step": 46
+ },
+ {
+ "epoch": 0.188,
+ "grad_norm": 0.37624066260466155,
+ "learning_rate": 4.997143676067913e-06,
+ "loss": 0.6703,
+ "step": 47
+ },
+ {
+ "epoch": 0.192,
+ "grad_norm": 0.36653954523845933,
+ "learning_rate": 4.99626950870707e-06,
+ "loss": 0.6781,
+ "step": 48
+ },
+ {
+ "epoch": 0.196,
+ "grad_norm": 0.3477765639450634,
+ "learning_rate": 4.995278908941845e-06,
+ "loss": 0.6745,
+ "step": 49
+ },
+ {
+ "epoch": 0.2,
+ "grad_norm": 0.3342296714847872,
+ "learning_rate": 4.994171922976349e-06,
+ "loss": 0.6771,
+ "step": 50
+ },
+ {
+ "epoch": 0.204,
+ "grad_norm": 0.37958792690250714,
+ "learning_rate": 4.9929486024432405e-06,
+ "loss": 0.6546,
+ "step": 51
+ },
+ {
+ "epoch": 0.208,
+ "grad_norm": 0.40870653964939113,
+ "learning_rate": 4.991609004401324e-06,
+ "loss": 0.6528,
+ "step": 52
+ },
+ {
+ "epoch": 0.212,
+ "grad_norm": 0.364192633459814,
+ "learning_rate": 4.990153191332885e-06,
+ "loss": 0.6458,
+ "step": 53
+ },
+ {
+ "epoch": 0.216,
+ "grad_norm": 0.30668416089546924,
+ "learning_rate": 4.988581231140772e-06,
+ "loss": 0.672,
+ "step": 54
+ },
+ {
+ "epoch": 0.22,
+ "grad_norm": 0.33255921507240616,
+ "learning_rate": 4.986893197145238e-06,
+ "loss": 0.6575,
+ "step": 55
+ },
+ {
+ "epoch": 0.224,
+ "grad_norm": 0.312428536241603,
+ "learning_rate": 4.985089168080509e-06,
+ "loss": 0.6629,
+ "step": 56
+ },
+ {
+ "epoch": 0.228,
+ "grad_norm": 0.2961748326875195,
+ "learning_rate": 4.983169228091125e-06,
+ "loss": 0.6514,
+ "step": 57
+ },
+ {
+ "epoch": 0.232,
+ "grad_norm": 0.30513153025857964,
+ "learning_rate": 4.981133466728004e-06,
+ "loss": 0.6547,
+ "step": 58
+ },
+ {
+ "epoch": 0.236,
+ "grad_norm": 0.3274000044829569,
+ "learning_rate": 4.978981978944271e-06,
+ "loss": 0.6514,
+ "step": 59
+ },
+ {
+ "epoch": 0.24,
+ "grad_norm": 0.3083977406610579,
+ "learning_rate": 4.976714865090827e-06,
+ "loss": 0.6422,
+ "step": 60
+ },
+ {
+ "epoch": 0.244,
+ "grad_norm": 0.3052292638265554,
+ "learning_rate": 4.97433223091167e-06,
+ "loss": 0.6355,
+ "step": 61
+ },
+ {
+ "epoch": 0.248,
+ "grad_norm": 0.2959593176715387,
+ "learning_rate": 4.971834187538963e-06,
+ "loss": 0.6551,
+ "step": 62
+ },
+ {
+ "epoch": 0.252,
+ "grad_norm": 0.29042322618949995,
+ "learning_rate": 4.9692208514878445e-06,
+ "loss": 0.6581,
+ "step": 63
+ },
+ {
+ "epoch": 0.256,
+ "grad_norm": 0.3149031676109748,
+ "learning_rate": 4.966492344651006e-06,
+ "loss": 0.6594,
+ "step": 64
+ },
+ {
+ "epoch": 0.26,
+ "grad_norm": 0.2874450280533471,
+ "learning_rate": 4.963648794292992e-06,
+ "loss": 0.6488,
+ "step": 65
+ },
+ {
+ "epoch": 0.264,
+ "grad_norm": 0.27325192501172824,
+ "learning_rate": 4.960690333044279e-06,
+ "loss": 0.6351,
+ "step": 66
+ },
+ {
+ "epoch": 0.268,
+ "grad_norm": 0.2817944445002008,
+ "learning_rate": 4.957617098895076e-06,
+ "loss": 0.6265,
+ "step": 67
+ },
+ {
+ "epoch": 0.272,
+ "grad_norm": 0.281980621240097,
+ "learning_rate": 4.954429235188897e-06,
+ "loss": 0.6432,
+ "step": 68
+ },
+ {
+ "epoch": 0.276,
+ "grad_norm": 0.2962526841715817,
+ "learning_rate": 4.951126890615871e-06,
+ "loss": 0.6354,
+ "step": 69
+ },
+ {
+ "epoch": 0.28,
+ "grad_norm": 0.28005347365677186,
+ "learning_rate": 4.947710219205808e-06,
+ "loss": 0.6548,
+ "step": 70
+ },
+ {
+ "epoch": 0.284,
+ "grad_norm": 0.26960325063378,
+ "learning_rate": 4.944179380321015e-06,
+ "loss": 0.6125,
+ "step": 71
+ },
+ {
+ "epoch": 0.288,
+ "grad_norm": 0.2848666105192882,
+ "learning_rate": 4.940534538648862e-06,
+ "loss": 0.6078,
+ "step": 72
+ },
+ {
+ "epoch": 0.292,
+ "grad_norm": 0.276353304899713,
+ "learning_rate": 4.936775864194101e-06,
+ "loss": 0.6524,
+ "step": 73
+ },
+ {
+ "epoch": 0.296,
+ "grad_norm": 0.27790715839174,
+ "learning_rate": 4.932903532270939e-06,
+ "loss": 0.6275,
+ "step": 74
+ },
+ {
+ "epoch": 0.3,
+ "grad_norm": 0.2636499704034946,
+ "learning_rate": 4.928917723494854e-06,
+ "loss": 0.6422,
+ "step": 75
+ },
+ {
+ "epoch": 0.304,
+ "grad_norm": 0.28133008746598603,
+ "learning_rate": 4.924818623774178e-06,
+ "loss": 0.6355,
+ "step": 76
+ },
+ {
+ "epoch": 0.308,
+ "grad_norm": 0.2779410264901997,
+ "learning_rate": 4.920606424301424e-06,
+ "loss": 0.6408,
+ "step": 77
+ },
+ {
+ "epoch": 0.312,
+ "grad_norm": 0.2897057665750394,
+ "learning_rate": 4.916281321544362e-06,
+ "loss": 0.6402,
+ "step": 78
+ },
+ {
+ "epoch": 0.316,
+ "grad_norm": 0.2761996704216673,
+ "learning_rate": 4.911843517236867e-06,
+ "loss": 0.653,
+ "step": 79
+ },
+ {
+ "epoch": 0.32,
+ "grad_norm": 0.29413202875425243,
+ "learning_rate": 4.907293218369499e-06,
+ "loss": 0.6298,
+ "step": 80
+ },
+ {
+ "epoch": 0.324,
+ "grad_norm": 0.2861680279307969,
+ "learning_rate": 4.9026306371798526e-06,
+ "loss": 0.6553,
+ "step": 81
+ },
+ {
+ "epoch": 0.328,
+ "grad_norm": 0.2848851913581759,
+ "learning_rate": 4.897855991142658e-06,
+ "loss": 0.6076,
+ "step": 82
+ },
+ {
+ "epoch": 0.332,
+ "grad_norm": 0.2687848093353044,
+ "learning_rate": 4.892969502959639e-06,
+ "loss": 0.6311,
+ "step": 83
+ },
+ {
+ "epoch": 0.336,
+ "grad_norm": 0.27061397240144314,
+ "learning_rate": 4.8879714005491205e-06,
+ "loss": 0.6148,
+ "step": 84
+ },
+ {
+ "epoch": 0.34,
+ "grad_norm": 0.2663532374960906,
+ "learning_rate": 4.882861917035403e-06,
+ "loss": 0.6412,
+ "step": 85
+ },
+ {
+ "epoch": 0.344,
+ "grad_norm": 0.27752950332016424,
+ "learning_rate": 4.8776412907378845e-06,
+ "loss": 0.642,
+ "step": 86
+ },
+ {
+ "epoch": 0.348,
+ "grad_norm": 0.2772792789989426,
+ "learning_rate": 4.87230976515995e-06,
+ "loss": 0.6184,
+ "step": 87
+ },
+ {
+ "epoch": 0.352,
+ "grad_norm": 0.29699617925202587,
+ "learning_rate": 4.8668675889776095e-06,
+ "loss": 0.6275,
+ "step": 88
+ },
+ {
+ "epoch": 0.356,
+ "grad_norm": 0.2736517750983177,
+ "learning_rate": 4.861315016027902e-06,
+ "loss": 0.6347,
+ "step": 89
+ },
+ {
+ "epoch": 0.36,
+ "grad_norm": 0.2871346395772671,
+ "learning_rate": 4.855652305297052e-06,
+ "loss": 0.6132,
+ "step": 90
+ },
+ {
+ "epoch": 0.364,
+ "grad_norm": 0.27131438304958966,
+ "learning_rate": 4.849879720908394e-06,
+ "loss": 0.6026,
+ "step": 91
+ },
+ {
+ "epoch": 0.368,
+ "grad_norm": 0.2701400539855458,
+ "learning_rate": 4.843997532110051e-06,
+ "loss": 0.6385,
+ "step": 92
+ },
+ {
+ "epoch": 0.372,
+ "grad_norm": 0.2870430498497826,
+ "learning_rate": 4.8380060132623776e-06,
+ "loss": 0.624,
+ "step": 93
+ },
+ {
+ "epoch": 0.376,
+ "grad_norm": 0.2889305346509075,
+ "learning_rate": 4.83190544382516e-06,
+ "loss": 0.6375,
+ "step": 94
+ },
+ {
+ "epoch": 0.38,
+ "grad_norm": 0.2832554152572974,
+ "learning_rate": 4.825696108344583e-06,
+ "loss": 0.6348,
+ "step": 95
+ },
+ {
+ "epoch": 0.384,
+ "grad_norm": 0.282963454209734,
+ "learning_rate": 4.819378296439962e-06,
+ "loss": 0.6425,
+ "step": 96
+ },
+ {
+ "epoch": 0.388,
+ "grad_norm": 0.28136329883499284,
+ "learning_rate": 4.812952302790226e-06,
+ "loss": 0.6238,
+ "step": 97
+ },
+ {
+ "epoch": 0.392,
+ "grad_norm": 0.2751896445755537,
+ "learning_rate": 4.80641842712018e-06,
+ "loss": 0.6453,
+ "step": 98
+ },
+ {
+ "epoch": 0.396,
+ "grad_norm": 0.2802941094985609,
+ "learning_rate": 4.799776974186523e-06,
+ "loss": 0.6362,
+ "step": 99
+ },
+ {
+ "epoch": 0.4,
+ "grad_norm": 0.2816828378634338,
+ "learning_rate": 4.793028253763633e-06,
+ "loss": 0.6394,
+ "step": 100
+ },
+ {
+ "epoch": 0.404,
+ "grad_norm": 0.28942806582562414,
+ "learning_rate": 4.786172580629118e-06,
+ "loss": 0.6106,
+ "step": 101
+ },
+ {
+ "epoch": 0.408,
+ "grad_norm": 0.28396225609673553,
+ "learning_rate": 4.7792102745491345e-06,
+ "loss": 0.6302,
+ "step": 102
+ },
+ {
+ "epoch": 0.412,
+ "grad_norm": 0.269131748983874,
+ "learning_rate": 4.772141660263472e-06,
+ "loss": 0.6247,
+ "step": 103
+ },
+ {
+ "epoch": 0.416,
+ "grad_norm": 0.28932510717249166,
+ "learning_rate": 4.764967067470409e-06,
+ "loss": 0.6201,
+ "step": 104
+ },
+ {
+ "epoch": 0.42,
+ "grad_norm": 0.2734040302258933,
+ "learning_rate": 4.757686830811332e-06,
+ "loss": 0.6092,
+ "step": 105
+ },
+ {
+ "epoch": 0.424,
+ "grad_norm": 0.28628481564556507,
+ "learning_rate": 4.750301289855128e-06,
+ "loss": 0.6284,
+ "step": 106
+ },
+ {
+ "epoch": 0.428,
+ "grad_norm": 0.30932729909286755,
+ "learning_rate": 4.742810789082345e-06,
+ "loss": 0.6332,
+ "step": 107
+ },
+ {
+ "epoch": 0.432,
+ "grad_norm": 0.2707515621211506,
+ "learning_rate": 4.735215677869129e-06,
+ "loss": 0.6107,
+ "step": 108
+ },
+ {
+ "epoch": 0.436,
+ "grad_norm": 0.29859925040242785,
+ "learning_rate": 4.72751631047092e-06,
+ "loss": 0.6477,
+ "step": 109
+ },
+ {
+ "epoch": 0.44,
+ "grad_norm": 0.2974079849667466,
+ "learning_rate": 4.7197130460059385e-06,
+ "loss": 0.632,
+ "step": 110
+ },
+ {
+ "epoch": 0.444,
+ "grad_norm": 0.28109309454092835,
+ "learning_rate": 4.711806248438428e-06,
+ "loss": 0.6308,
+ "step": 111
+ },
+ {
+ "epoch": 0.448,
+ "grad_norm": 0.287627270908265,
+ "learning_rate": 4.7037962865616795e-06,
+ "loss": 0.6322,
+ "step": 112
+ },
+ {
+ "epoch": 0.452,
+ "grad_norm": 0.2859506113795605,
+ "learning_rate": 4.695683533980835e-06,
+ "loss": 0.6196,
+ "step": 113
+ },
+ {
+ "epoch": 0.456,
+ "grad_norm": 0.27768505471724575,
+ "learning_rate": 4.687468369095457e-06,
+ "loss": 0.6107,
+ "step": 114
+ },
+ {
+ "epoch": 0.46,
+ "grad_norm": 0.2985912133376052,
+ "learning_rate": 4.679151175081879e-06,
+ "loss": 0.6316,
+ "step": 115
+ },
+ {
+ "epoch": 0.464,
+ "grad_norm": 0.27790254502289174,
+ "learning_rate": 4.6707323398753346e-06,
+ "loss": 0.6194,
+ "step": 116
+ },
+ {
+ "epoch": 0.468,
+ "grad_norm": 0.2770496916475714,
+ "learning_rate": 4.662212256151865e-06,
+ "loss": 0.5938,
+ "step": 117
+ },
+ {
+ "epoch": 0.472,
+ "grad_norm": 0.2750067208531331,
+ "learning_rate": 4.6535913213100005e-06,
+ "loss": 0.6125,
+ "step": 118
+ },
+ {
+ "epoch": 0.476,
+ "grad_norm": 0.2640533408637943,
+ "learning_rate": 4.644869937452224e-06,
+ "loss": 0.6245,
+ "step": 119
+ },
+ {
+ "epoch": 0.48,
+ "grad_norm": 0.27591929390872805,
+ "learning_rate": 4.636048511366222e-06,
+ "loss": 0.6186,
+ "step": 120
+ },
+ {
+ "epoch": 0.484,
+ "grad_norm": 0.29643778935625803,
+ "learning_rate": 4.627127454505902e-06,
+ "loss": 0.6086,
+ "step": 121
+ },
+ {
+ "epoch": 0.488,
+ "grad_norm": 0.28076149524670235,
+ "learning_rate": 4.618107182972209e-06,
+ "loss": 0.6158,
+ "step": 122
+ },
+ {
+ "epoch": 0.492,
+ "grad_norm": 0.28214439623978305,
+ "learning_rate": 4.6089881174937146e-06,
+ "loss": 0.6299,
+ "step": 123
+ },
+ {
+ "epoch": 0.496,
+ "grad_norm": 0.2885430172419291,
+ "learning_rate": 4.599770683406992e-06,
+ "loss": 0.6367,
+ "step": 124
+ },
+ {
+ "epoch": 0.5,
+ "grad_norm": 0.28978271139767015,
+ "learning_rate": 4.590455310636778e-06,
+ "loss": 0.6248,
+ "step": 125
+ },
+ {
+ "epoch": 0.504,
+ "grad_norm": 0.2763777911865909,
+ "learning_rate": 4.58104243367592e-06,
+ "loss": 0.6235,
+ "step": 126
+ },
+ {
+ "epoch": 0.508,
+ "grad_norm": 0.2788581607213461,
+ "learning_rate": 4.571532491565115e-06,
+ "loss": 0.6201,
+ "step": 127
+ },
+ {
+ "epoch": 0.512,
+ "grad_norm": 0.28522419436506885,
+ "learning_rate": 4.561925927872421e-06,
+ "loss": 0.6287,
+ "step": 128
+ },
+ {
+ "epoch": 0.516,
+ "grad_norm": 0.2759422238312871,
+ "learning_rate": 4.55222319067258e-06,
+ "loss": 0.6105,
+ "step": 129
+ },
+ {
+ "epoch": 0.52,
+ "grad_norm": 0.271521828303117,
+ "learning_rate": 4.542424732526105e-06,
+ "loss": 0.6004,
+ "step": 130
+ },
+ {
+ "epoch": 0.524,
+ "grad_norm": 0.2832768486502443,
+ "learning_rate": 4.532531010458188e-06,
+ "loss": 0.6438,
+ "step": 131
+ },
+ {
+ "epoch": 0.528,
+ "grad_norm": 0.28545986352466657,
+ "learning_rate": 4.522542485937369e-06,
+ "loss": 0.6147,
+ "step": 132
+ },
+ {
+ "epoch": 0.532,
+ "grad_norm": 0.2843650568512383,
+ "learning_rate": 4.512459624854017e-06,
+ "loss": 0.6347,
+ "step": 133
+ },
+ {
+ "epoch": 0.536,
+ "grad_norm": 0.2758779923686556,
+ "learning_rate": 4.5022828974986044e-06,
+ "loss": 0.6111,
+ "step": 134
+ },
+ {
+ "epoch": 0.54,
+ "grad_norm": 0.28471240078326554,
952
+ "learning_rate": 4.4920127785397615e-06,
953
+ "loss": 0.6161,
954
+ "step": 135
955
+ },
956
+ {
957
+ "epoch": 0.544,
958
+ "grad_norm": 0.27215538114487603,
959
+ "learning_rate": 4.481649747002146e-06,
960
+ "loss": 0.6019,
961
+ "step": 136
962
+ },
963
+ {
964
+ "epoch": 0.548,
965
+ "grad_norm": 0.27161590017753495,
966
+ "learning_rate": 4.471194286244094e-06,
967
+ "loss": 0.6229,
968
+ "step": 137
969
+ },
970
+ {
971
+ "epoch": 0.552,
972
+ "grad_norm": 0.2786884282741861,
973
+ "learning_rate": 4.460646883935079e-06,
974
+ "loss": 0.6217,
975
+ "step": 138
976
+ },
977
+ {
978
+ "epoch": 0.556,
979
+ "grad_norm": 0.29095908793086706,
980
+ "learning_rate": 4.4500080320329615e-06,
981
+ "loss": 0.6212,
982
+ "step": 139
983
+ },
984
+ {
985
+ "epoch": 0.56,
986
+ "grad_norm": 0.2797512942233689,
987
+ "learning_rate": 4.43927822676105e-06,
988
+ "loss": 0.6183,
989
+ "step": 140
990
+ },
991
+ {
992
+ "epoch": 0.564,
993
+ "grad_norm": 0.2701904530059608,
994
+ "learning_rate": 4.428457968584945e-06,
995
+ "loss": 0.6067,
996
+ "step": 141
997
+ },
998
+ {
999
+ "epoch": 0.568,
1000
+ "grad_norm": 0.2924071263588622,
1001
+ "learning_rate": 4.417547762189207e-06,
1002
+ "loss": 0.6167,
1003
+ "step": 142
1004
+ },
1005
+ {
1006
+ "epoch": 0.572,
1007
+ "grad_norm": 0.2684300131690406,
1008
+ "learning_rate": 4.40654811645381e-06,
1009
+ "loss": 0.6185,
1010
+ "step": 143
1011
+ },
1012
+ {
1013
+ "epoch": 0.576,
1014
+ "grad_norm": 0.2774759359262972,
1015
+ "learning_rate": 4.395459544430407e-06,
1016
+ "loss": 0.602,
1017
+ "step": 144
1018
+ },
1019
+ {
1020
+ "epoch": 0.58,
1021
+ "grad_norm": 0.2808643430953345,
1022
+ "learning_rate": 4.384282563318403e-06,
1023
+ "loss": 0.598,
1024
+ "step": 145
1025
+ },
1026
+ {
1027
+ "epoch": 0.584,
1028
+ "grad_norm": 0.27015365579319356,
1029
+ "learning_rate": 4.373017694440828e-06,
1030
+ "loss": 0.5857,
1031
+ "step": 146
1032
+ },
1033
+ {
1034
+ "epoch": 0.588,
1035
+ "grad_norm": 0.2856861787094523,
1036
+ "learning_rate": 4.361665463220023e-06,
1037
+ "loss": 0.6206,
1038
+ "step": 147
1039
+ },
1040
+ {
1041
+ "epoch": 0.592,
1042
+ "grad_norm": 0.28199517014381215,
1043
+ "learning_rate": 4.35022639915313e-06,
1044
+ "loss": 0.6094,
1045
+ "step": 148
1046
+ },
1047
+ {
1048
+ "epoch": 0.596,
1049
+ "grad_norm": 0.27010314199532126,
1050
+ "learning_rate": 4.338701035787403e-06,
1051
+ "loss": 0.5947,
1052
+ "step": 149
1053
+ },
1054
+ {
1055
+ "epoch": 0.6,
1056
+ "grad_norm": 0.2601892508343049,
1057
+ "learning_rate": 4.32708991069531e-06,
1058
+ "loss": 0.5871,
1059
+ "step": 150
1060
+ },
1061
+ {
1062
+ "epoch": 0.604,
1063
+ "grad_norm": 0.27825064672033506,
1064
+ "learning_rate": 4.315393565449472e-06,
1065
+ "loss": 0.6093,
1066
+ "step": 151
1067
+ },
1068
+ {
1069
+ "epoch": 0.608,
1070
+ "grad_norm": 0.27958608277896724,
1071
+ "learning_rate": 4.30361254559739e-06,
1072
+ "loss": 0.5951,
1073
+ "step": 152
1074
+ },
1075
+ {
1076
+ "epoch": 0.612,
1077
+ "grad_norm": 0.2758779818206466,
1078
+ "learning_rate": 4.291747400636009e-06,
1079
+ "loss": 0.6062,
1080
+ "step": 153
1081
+ },
1082
+ {
1083
+ "epoch": 0.616,
1084
+ "grad_norm": 0.29572303953208817,
1085
+ "learning_rate": 4.279798683986084e-06,
1086
+ "loss": 0.605,
1087
+ "step": 154
1088
+ },
1089
+ {
1090
+ "epoch": 0.62,
1091
+ "grad_norm": 0.28194612739384267,
1092
+ "learning_rate": 4.267766952966369e-06,
1093
+ "loss": 0.6078,
1094
+ "step": 155
1095
+ },
1096
+ {
1097
+ "epoch": 0.624,
1098
+ "grad_norm": 0.28615376291544004,
1099
+ "learning_rate": 4.255652768767619e-06,
1100
+ "loss": 0.6319,
1101
+ "step": 156
1102
+ },
1103
+ {
1104
+ "epoch": 0.628,
1105
+ "grad_norm": 0.26959219285273633,
1106
+ "learning_rate": 4.243456696426415e-06,
1107
+ "loss": 0.5968,
1108
+ "step": 157
1109
+ },
1110
+ {
1111
+ "epoch": 0.632,
1112
+ "grad_norm": 0.27878753771339543,
1113
+ "learning_rate": 4.2311793047988145e-06,
1114
+ "loss": 0.6214,
1115
+ "step": 158
1116
+ },
1117
+ {
1118
+ "epoch": 0.636,
1119
+ "grad_norm": 0.2779844282953486,
1120
+ "learning_rate": 4.218821166533813e-06,
1121
+ "loss": 0.5964,
1122
+ "step": 159
1123
+ },
1124
+ {
1125
+ "epoch": 0.64,
1126
+ "grad_norm": 0.2767287929857217,
1127
+ "learning_rate": 4.206382858046636e-06,
1128
+ "loss": 0.6187,
1129
+ "step": 160
1130
+ },
1131
+ {
1132
+ "epoch": 0.644,
1133
+ "grad_norm": 0.2652936251998452,
1134
+ "learning_rate": 4.193864959491853e-06,
1135
+ "loss": 0.5897,
1136
+ "step": 161
1137
+ },
1138
+ {
1139
+ "epoch": 0.648,
1140
+ "grad_norm": 0.26227694980471933,
1141
+ "learning_rate": 4.181268054736319e-06,
1142
+ "loss": 0.6107,
1143
+ "step": 162
1144
+ },
1145
+ {
1146
+ "epoch": 0.652,
1147
+ "grad_norm": 0.2690441499487734,
1148
+ "learning_rate": 4.16859273133194e-06,
1149
+ "loss": 0.6012,
1150
+ "step": 163
1151
+ },
1152
+ {
1153
+ "epoch": 0.656,
1154
+ "grad_norm": 0.26934906424793176,
1155
+ "learning_rate": 4.15583958048827e-06,
1156
+ "loss": 0.6086,
1157
+ "step": 164
1158
+ },
1159
+ {
1160
+ "epoch": 0.66,
1161
+ "grad_norm": 0.2727839454931186,
1162
+ "learning_rate": 4.143009197044932e-06,
1163
+ "loss": 0.6156,
1164
+ "step": 165
1165
+ },
1166
+ {
1167
+ "epoch": 0.664,
1168
+ "grad_norm": 0.2767715664709689,
1169
+ "learning_rate": 4.130102179443877e-06,
1170
+ "loss": 0.607,
1171
+ "step": 166
1172
+ },
1173
+ {
1174
+ "epoch": 0.668,
1175
+ "grad_norm": 0.27462389864805775,
1176
+ "learning_rate": 4.117119129701468e-06,
1177
+ "loss": 0.598,
1178
+ "step": 167
1179
+ },
1180
+ {
1181
+ "epoch": 0.672,
1182
+ "grad_norm": 0.28124958800487015,
1183
+ "learning_rate": 4.104060653380403e-06,
1184
+ "loss": 0.6174,
1185
+ "step": 168
1186
+ },
1187
+ {
1188
+ "epoch": 0.676,
1189
+ "grad_norm": 0.26867080247614167,
1190
+ "learning_rate": 4.090927359561469e-06,
1191
+ "loss": 0.6222,
1192
+ "step": 169
1193
+ },
1194
+ {
1195
+ "epoch": 0.68,
1196
+ "grad_norm": 0.27329020109654967,
1197
+ "learning_rate": 4.077719860815132e-06,
1198
+ "loss": 0.6174,
1199
+ "step": 170
1200
+ },
1201
+ {
1202
+ "epoch": 0.684,
1203
+ "grad_norm": 0.2598239429892548,
1204
+ "learning_rate": 4.064438773172966e-06,
1205
+ "loss": 0.5949,
1206
+ "step": 171
1207
+ },
1208
+ {
1209
+ "epoch": 0.688,
1210
+ "grad_norm": 0.26610910407219807,
1211
+ "learning_rate": 4.051084716098921e-06,
1212
+ "loss": 0.5876,
1213
+ "step": 172
1214
+ },
1215
+ {
1216
+ "epoch": 0.692,
1217
+ "grad_norm": 0.28832248653224085,
1218
+ "learning_rate": 4.037658312460424e-06,
1219
+ "loss": 0.6038,
1220
+ "step": 173
1221
+ },
1222
+ {
1223
+ "epoch": 0.696,
1224
+ "grad_norm": 0.27995126745782395,
1225
+ "learning_rate": 4.024160188499337e-06,
1226
+ "loss": 0.6024,
1227
+ "step": 174
1228
+ },
1229
+ {
1230
+ "epoch": 0.7,
1231
+ "grad_norm": 0.2708024464743442,
1232
+ "learning_rate": 4.010590973802737e-06,
1233
+ "loss": 0.6166,
1234
+ "step": 175
1235
+ },
1236
+ {
1237
+ "epoch": 0.704,
1238
+ "grad_norm": 0.27379773644645394,
1239
+ "learning_rate": 3.996951301273556e-06,
1240
+ "loss": 0.6172,
1241
+ "step": 176
1242
+ },
1243
+ {
1244
+ "epoch": 0.708,
1245
+ "grad_norm": 0.2704403624062539,
1246
+ "learning_rate": 3.983241807101064e-06,
1247
+ "loss": 0.5848,
1248
+ "step": 177
1249
+ },
1250
+ {
1251
+ "epoch": 0.712,
1252
+ "grad_norm": 0.26367325554187204,
1253
+ "learning_rate": 3.969463130731183e-06,
1254
+ "loss": 0.6084,
1255
+ "step": 178
1256
+ },
1257
+ {
1258
+ "epoch": 0.716,
1259
+ "grad_norm": 0.2714449492216179,
1260
+ "learning_rate": 3.955615914836678e-06,
1261
+ "loss": 0.6067,
1262
+ "step": 179
1263
+ },
1264
+ {
1265
+ "epoch": 0.72,
1266
+ "grad_norm": 0.27396192782433526,
1267
+ "learning_rate": 3.941700805287169e-06,
1268
+ "loss": 0.6049,
1269
+ "step": 180
1270
+ },
1271
+ {
1272
+ "epoch": 0.724,
1273
+ "grad_norm": 0.2712108680127688,
1274
+ "learning_rate": 3.927718451119009e-06,
1275
+ "loss": 0.5981,
1276
+ "step": 181
1277
+ },
1278
+ {
1279
+ "epoch": 0.728,
1280
+ "grad_norm": 0.27016877733602884,
1281
+ "learning_rate": 3.913669504505015e-06,
1282
+ "loss": 0.6148,
1283
+ "step": 182
1284
+ },
1285
+ {
1286
+ "epoch": 0.732,
1287
+ "grad_norm": 0.2986908827790219,
1288
+ "learning_rate": 3.8995546207240455e-06,
1289
+ "loss": 0.6293,
1290
+ "step": 183
1291
+ },
1292
+ {
1293
+ "epoch": 0.736,
1294
+ "grad_norm": 0.27281610268420575,
1295
+ "learning_rate": 3.8853744581304376e-06,
1296
+ "loss": 0.5937,
1297
+ "step": 184
1298
+ },
1299
+ {
1300
+ "epoch": 0.74,
1301
+ "grad_norm": 0.28387181164952147,
1302
+ "learning_rate": 3.871129678123297e-06,
1303
+ "loss": 0.6098,
1304
+ "step": 185
1305
+ },
1306
+ {
1307
+ "epoch": 0.744,
1308
+ "grad_norm": 0.2740312224605285,
1309
+ "learning_rate": 3.856820945115655e-06,
1310
+ "loss": 0.6078,
1311
+ "step": 186
1312
+ },
1313
+ {
1314
+ "epoch": 0.748,
1315
+ "grad_norm": 0.2662514099930545,
1316
+ "learning_rate": 3.84244892650347e-06,
1317
+ "loss": 0.6254,
1318
+ "step": 187
1319
+ },
1320
+ {
1321
+ "epoch": 0.752,
1322
+ "grad_norm": 0.26802564374459203,
1323
+ "learning_rate": 3.828014292634508e-06,
1324
+ "loss": 0.6121,
1325
+ "step": 188
1326
+ },
1327
+ {
1328
+ "epoch": 0.756,
1329
+ "grad_norm": 0.28248517647364846,
1330
+ "learning_rate": 3.813517716777069e-06,
1331
+ "loss": 0.6202,
1332
+ "step": 189
1333
+ },
1334
+ {
1335
+ "epoch": 0.76,
1336
+ "grad_norm": 0.272622897496479,
1337
+ "learning_rate": 3.798959875088584e-06,
1338
+ "loss": 0.5901,
1339
+ "step": 190
1340
+ },
1341
+ {
1342
+ "epoch": 0.764,
1343
+ "grad_norm": 0.27197490333330376,
1344
+ "learning_rate": 3.7843414465840823e-06,
1345
+ "loss": 0.5856,
1346
+ "step": 191
1347
+ },
1348
+ {
1349
+ "epoch": 0.768,
1350
+ "grad_norm": 0.26663311683845875,
1351
+ "learning_rate": 3.769663113104516e-06,
1352
+ "loss": 0.5907,
1353
+ "step": 192
1354
+ },
1355
+ {
1356
+ "epoch": 0.772,
1357
+ "grad_norm": 0.2714585682015405,
1358
+ "learning_rate": 3.7549255592849575e-06,
1359
+ "loss": 0.6072,
1360
+ "step": 193
1361
+ },
1362
+ {
1363
+ "epoch": 0.776,
1364
+ "grad_norm": 0.2766267849608307,
1365
+ "learning_rate": 3.7401294725226707e-06,
1366
+ "loss": 0.6158,
1367
+ "step": 194
1368
+ },
1369
+ {
1370
+ "epoch": 0.78,
1371
+ "grad_norm": 0.26291754258948374,
1372
+ "learning_rate": 3.7252755429450437e-06,
1373
+ "loss": 0.5921,
1374
+ "step": 195
1375
+ },
1376
+ {
1377
+ "epoch": 0.784,
1378
+ "grad_norm": 0.26530027759256725,
1379
+ "learning_rate": 3.7103644633774015e-06,
1380
+ "loss": 0.5841,
1381
+ "step": 196
1382
+ },
1383
+ {
1384
+ "epoch": 0.788,
1385
+ "grad_norm": 0.26634011298693916,
1386
+ "learning_rate": 3.695396929310693e-06,
1387
+ "loss": 0.6147,
1388
+ "step": 197
1389
+ },
1390
+ {
1391
+ "epoch": 0.792,
1392
+ "grad_norm": 0.26354297421036926,
1393
+ "learning_rate": 3.680373638869047e-06,
1394
+ "loss": 0.6061,
1395
+ "step": 198
1396
+ },
1397
+ {
1398
+ "epoch": 0.796,
1399
+ "grad_norm": 0.2738313781435172,
1400
+ "learning_rate": 3.665295292777214e-06,
1401
+ "loss": 0.5903,
1402
+ "step": 199
1403
+ },
1404
+ {
1405
+ "epoch": 0.8,
1406
+ "grad_norm": 0.27041603557150606,
1407
+ "learning_rate": 3.650162594327881e-06,
1408
+ "loss": 0.6216,
1409
+ "step": 200
1410
+ },
1411
+ {
1412
+ "epoch": 0.804,
1413
+ "grad_norm": 0.29233209893761647,
1414
+ "learning_rate": 3.634976249348867e-06,
1415
+ "loss": 0.6221,
1416
+ "step": 201
1417
+ },
1418
+ {
1419
+ "epoch": 0.808,
1420
+ "grad_norm": 0.28236083977418097,
1421
+ "learning_rate": 3.6197369661702052e-06,
1422
+ "loss": 0.6048,
1423
+ "step": 202
1424
+ },
1425
+ {
1426
+ "epoch": 0.812,
1427
+ "grad_norm": 0.2610570975246162,
1428
+ "learning_rate": 3.604445455591099e-06,
1429
+ "loss": 0.586,
1430
+ "step": 203
1431
+ },
1432
+ {
1433
+ "epoch": 0.816,
1434
+ "grad_norm": 0.27792495585124566,
1435
+ "learning_rate": 3.589102430846773e-06,
1436
+ "loss": 0.6052,
1437
+ "step": 204
1438
+ },
1439
+ {
1440
+ "epoch": 0.82,
1441
+ "grad_norm": 0.27390708264043134,
1442
+ "learning_rate": 3.5737086075752054e-06,
1443
+ "loss": 0.5968,
1444
+ "step": 205
1445
+ },
1446
+ {
1447
+ "epoch": 0.824,
1448
+ "grad_norm": 0.26341409551542055,
1449
+ "learning_rate": 3.5582647037837446e-06,
1450
+ "loss": 0.6128,
1451
+ "step": 206
1452
+ },
1453
+ {
1454
+ "epoch": 0.828,
1455
+ "grad_norm": 0.2659397773506794,
1456
+ "learning_rate": 3.5427714398156267e-06,
1457
+ "loss": 0.6171,
1458
+ "step": 207
1459
+ },
1460
+ {
1461
+ "epoch": 0.832,
1462
+ "grad_norm": 0.278302186061793,
1463
+ "learning_rate": 3.527229538316371e-06,
1464
+ "loss": 0.6001,
1465
+ "step": 208
1466
+ },
1467
+ {
1468
+ "epoch": 0.836,
1469
+ "grad_norm": 0.27935693059901906,
1470
+ "learning_rate": 3.5116397242000748e-06,
1471
+ "loss": 0.5915,
1472
+ "step": 209
1473
+ },
1474
+ {
1475
+ "epoch": 0.84,
1476
+ "grad_norm": 0.2681762304000699,
1477
+ "learning_rate": 3.4960027246156043e-06,
1478
+ "loss": 0.5982,
1479
+ "step": 210
1480
+ },
1481
+ {
1482
+ "epoch": 0.844,
1483
+ "grad_norm": 0.26833511905783713,
1484
+ "learning_rate": 3.480319268912676e-06,
1485
+ "loss": 0.5823,
1486
+ "step": 211
1487
+ },
1488
+ {
1489
+ "epoch": 0.848,
1490
+ "grad_norm": 0.27282574698411466,
1491
+ "learning_rate": 3.4645900886078388e-06,
1492
+ "loss": 0.6098,
1493
+ "step": 212
1494
+ },
1495
+ {
1496
+ "epoch": 0.852,
1497
+ "grad_norm": 0.2841325964241835,
1498
+ "learning_rate": 3.448815917350355e-06,
1499
+ "loss": 0.6054,
1500
+ "step": 213
1501
+ },
1502
+ {
1503
+ "epoch": 0.856,
1504
+ "grad_norm": 0.27811416712297765,
1505
+ "learning_rate": 3.432997490887979e-06,
1506
+ "loss": 0.6071,
1507
+ "step": 214
1508
+ },
1509
+ {
1510
+ "epoch": 0.86,
1511
+ "grad_norm": 0.26897184119138856,
1512
+ "learning_rate": 3.417135547032642e-06,
1513
+ "loss": 0.612,
1514
+ "step": 215
1515
+ },
1516
+ {
1517
+ "epoch": 0.864,
1518
+ "grad_norm": 0.2752083088143504,
1519
+ "learning_rate": 3.4012308256260366e-06,
1520
+ "loss": 0.6189,
1521
+ "step": 216
1522
+ },
1523
+ {
1524
+ "epoch": 0.868,
1525
+ "grad_norm": 0.27146348269262077,
1526
+ "learning_rate": 3.385284068505113e-06,
1527
+ "loss": 0.5914,
1528
+ "step": 217
1529
+ },
1530
+ {
1531
+ "epoch": 0.872,
1532
+ "grad_norm": 0.2616553689610195,
1533
+ "learning_rate": 3.369296019467473e-06,
1534
+ "loss": 0.5935,
1535
+ "step": 218
1536
+ },
1537
+ {
1538
+ "epoch": 0.876,
1539
+ "grad_norm": 0.27439799989957114,
1540
+ "learning_rate": 3.3532674242366764e-06,
1541
+ "loss": 0.5815,
1542
+ "step": 219
1543
+ },
1544
+ {
1545
+ "epoch": 0.88,
1546
+ "grad_norm": 0.27322752130009204,
1547
+ "learning_rate": 3.3371990304274654e-06,
1548
+ "loss": 0.593,
1549
+ "step": 220
1550
+ },
1551
+ {
1552
+ "epoch": 0.884,
1553
+ "grad_norm": 0.2776915569429837,
1554
+ "learning_rate": 3.3210915875108895e-06,
1555
+ "loss": 0.636,
1556
+ "step": 221
1557
+ },
1558
+ {
1559
+ "epoch": 0.888,
1560
+ "grad_norm": 0.27021599999486623,
1561
+ "learning_rate": 3.304945846779346e-06,
1562
+ "loss": 0.618,
1563
+ "step": 222
1564
+ },
1565
+ {
1566
+ "epoch": 0.892,
1567
+ "grad_norm": 0.27748742860539916,
1568
+ "learning_rate": 3.2887625613115427e-06,
1569
+ "loss": 0.5937,
1570
+ "step": 223
1571
+ },
1572
+ {
1573
+ "epoch": 0.896,
1574
+ "grad_norm": 0.28302784990496294,
1575
+ "learning_rate": 3.272542485937369e-06,
1576
+ "loss": 0.6093,
1577
+ "step": 224
1578
+ },
1579
+ {
1580
+ "epoch": 0.9,
1581
+ "grad_norm": 0.2759446162478654,
1582
+ "learning_rate": 3.25628637720269e-06,
1583
+ "loss": 0.6261,
1584
+ "step": 225
1585
+ },
1586
+ {
1587
+ "epoch": 0.904,
1588
+ "grad_norm": 0.2794223898275526,
1589
+ "learning_rate": 3.239994993334059e-06,
1590
+ "loss": 0.6098,
1591
+ "step": 226
1592
+ },
1593
+ {
1594
+ "epoch": 0.908,
1595
+ "grad_norm": 0.2863521541541822,
1596
+ "learning_rate": 3.2236690942033523e-06,
1597
+ "loss": 0.6122,
1598
+ "step": 227
1599
+ },
1600
+ {
1601
+ "epoch": 0.912,
1602
+ "grad_norm": 0.2810413939687701,
1603
+ "learning_rate": 3.207309441292325e-06,
1604
+ "loss": 0.6193,
1605
+ "step": 228
1606
+ },
1607
+ {
1608
+ "epoch": 0.916,
1609
+ "grad_norm": 0.2686724574589154,
1610
+ "learning_rate": 3.1909167976570977e-06,
1611
+ "loss": 0.5847,
1612
+ "step": 229
1613
+ },
1614
+ {
1615
+ "epoch": 0.92,
1616
+ "grad_norm": 0.27668513115261734,
1617
+ "learning_rate": 3.174491927892561e-06,
1618
+ "loss": 0.6083,
1619
+ "step": 230
1620
+ },
1621
+ {
1622
+ "epoch": 0.924,
1623
+ "grad_norm": 0.2838278304951842,
1624
+ "learning_rate": 3.158035598096715e-06,
1625
+ "loss": 0.597,
1626
+ "step": 231
1627
+ },
1628
+ {
1629
+ "epoch": 0.928,
1630
+ "grad_norm": 0.2702238327135827,
1631
+ "learning_rate": 3.1415485758349344e-06,
1632
+ "loss": 0.5884,
1633
+ "step": 232
1634
+ },
1635
+ {
1636
+ "epoch": 0.932,
1637
+ "grad_norm": 0.2677368696260293,
1638
+ "learning_rate": 3.1250316301041727e-06,
1639
+ "loss": 0.5835,
1640
+ "step": 233
1641
+ },
1642
+ {
1643
+ "epoch": 0.936,
1644
+ "grad_norm": 0.2848905107931979,
1645
+ "learning_rate": 3.1084855312970897e-06,
1646
+ "loss": 0.6255,
1647
+ "step": 234
1648
+ },
1649
+ {
1650
+ "epoch": 0.94,
1651
+ "grad_norm": 0.2788843417615313,
1652
+ "learning_rate": 3.091911051166117e-06,
1653
+ "loss": 0.6215,
1654
+ "step": 235
1655
+ },
1656
+ {
1657
+ "epoch": 0.944,
1658
+ "grad_norm": 0.27680715854768223,
1659
+ "learning_rate": 3.0753089627874668e-06,
1660
+ "loss": 0.6022,
1661
+ "step": 236
1662
+ },
1663
+ {
1664
+ "epoch": 0.948,
1665
+ "grad_norm": 0.26897413893634964,
1666
+ "learning_rate": 3.0586800405250677e-06,
1667
+ "loss": 0.6194,
1668
+ "step": 237
1669
+ },
1670
+ {
1671
+ "epoch": 0.952,
1672
+ "grad_norm": 0.2657249388294363,
1673
+ "learning_rate": 3.0420250599944525e-06,
1674
+ "loss": 0.5884,
1675
+ "step": 238
1676
+ },
1677
+ {
1678
+ "epoch": 0.956,
1679
+ "grad_norm": 0.26576882569871957,
1680
+ "learning_rate": 3.0253447980265754e-06,
1681
+ "loss": 0.5949,
1682
+ "step": 239
1683
+ },
1684
+ {
1685
+ "epoch": 0.96,
1686
+ "grad_norm": 0.2754408264706426,
1687
+ "learning_rate": 3.0086400326315853e-06,
1688
+ "loss": 0.5747,
1689
+ "step": 240
1690
+ },
1691
+ {
1692
+ "epoch": 0.964,
1693
+ "grad_norm": 0.26333215813151695,
1694
+ "learning_rate": 2.9919115429625295e-06,
1695
+ "loss": 0.6035,
1696
+ "step": 241
1697
+ },
1698
+ {
1699
+ "epoch": 0.968,
1700
+ "grad_norm": 0.2687016604382336,
1701
+ "learning_rate": 2.9751601092790185e-06,
1702
+ "loss": 0.6057,
1703
+ "step": 242
1704
+ },
1705
+ {
1706
+ "epoch": 0.972,
1707
+ "grad_norm": 0.28837866031127346,
1708
+ "learning_rate": 2.958386512910831e-06,
1709
+ "loss": 0.5737,
1710
+ "step": 243
1711
+ },
1712
+ {
1713
+ "epoch": 0.976,
1714
+ "grad_norm": 0.27805411098388116,
1715
+ "learning_rate": 2.941591536221469e-06,
1716
+ "loss": 0.6022,
1717
+ "step": 244
1718
+ },
1719
+ {
1720
+ "epoch": 0.98,
1721
+ "grad_norm": 0.27696163335286905,
1722
+ "learning_rate": 2.924775962571667e-06,
1723
+ "loss": 0.6081,
1724
+ "step": 245
1725
+ },
1726
+ {
1727
+ "epoch": 0.984,
1728
+ "grad_norm": 0.28312029387331156,
1729
+ "learning_rate": 2.907940576282856e-06,
1730
+ "loss": 0.6178,
1731
+ "step": 246
1732
+ },
1733
+ {
1734
+ "epoch": 0.988,
1735
+ "grad_norm": 0.2516197979856304,
1736
+ "learning_rate": 2.8910861626005774e-06,
1737
+ "loss": 0.5812,
1738
+ "step": 247
1739
+ },
1740
+ {
1741
+ "epoch": 0.992,
1742
+ "grad_norm": 0.25765553700425475,
1743
+ "learning_rate": 2.8742135076578608e-06,
1744
+ "loss": 0.5868,
1745
+ "step": 248
1746
+ },
1747
+ {
1748
+ "epoch": 0.996,
1749
+ "grad_norm": 0.27540024093918797,
1750
+ "learning_rate": 2.857323398438554e-06,
1751
+ "loss": 0.5976,
1752
+ "step": 249
1753
+ },
1754
+ {
1755
+ "epoch": 1.0,
1756
+ "grad_norm": 0.2637219036362386,
1757
+ "learning_rate": 2.840416622740617e-06,
1758
+ "loss": 0.6106,
1759
+ "step": 250
1760
+ },
1761
+ {
1762
+ "epoch": 1.004,
1763
+ "grad_norm": 0.3270308432987493,
1764
+ "learning_rate": 2.8234939691393765e-06,
1765
+ "loss": 0.5662,
1766
+ "step": 251
1767
+ },
1768
+ {
1769
+ "epoch": 1.008,
1770
+ "grad_norm": 0.3032460726332443,
1771
+ "learning_rate": 2.8065562269507464e-06,
1772
+ "loss": 0.589,
1773
+ "step": 252
1774
+ },
1775
+ {
1776
+ "epoch": 1.012,
1777
+ "grad_norm": 0.2761928570311231,
1778
+ "learning_rate": 2.789604186194411e-06,
1779
+ "loss": 0.5654,
1780
+ "step": 253
1781
+ },
1782
+ {
1783
+ "epoch": 1.016,
1784
+ "grad_norm": 0.27190181641802363,
1785
+ "learning_rate": 2.7726386375569748e-06,
1786
+ "loss": 0.5908,
1787
+ "step": 254
1788
+ },
1789
+ {
1790
+ "epoch": 1.02,
1791
+ "grad_norm": 0.27382272887323905,
1792
+ "learning_rate": 2.7556603723550855e-06,
1793
+ "loss": 0.553,
1794
+ "step": 255
1795
+ },
1796
+ {
1797
+ "epoch": 1.024,
1798
+ "grad_norm": 0.2699869769385883,
1799
+ "learning_rate": 2.7386701824985257e-06,
1800
+ "loss": 0.5517,
1801
+ "step": 256
1802
+ },
1803
+ {
1804
+ "epoch": 1.028,
1805
+ "grad_norm": 0.2740431770397761,
1806
+ "learning_rate": 2.721668860453271e-06,
1807
+ "loss": 0.5611,
1808
+ "step": 257
1809
+ },
1810
+ {
1811
+ "epoch": 1.032,
1812
+ "grad_norm": 0.27006742872479766,
1813
+ "learning_rate": 2.7046571992045334e-06,
1814
+ "loss": 0.5609,
1815
+ "step": 258
1816
+ },
1817
+ {
1818
+ "epoch": 1.036,
1819
+ "grad_norm": 0.2853628863130536,
1820
+ "learning_rate": 2.6876359922197703e-06,
1821
+ "loss": 0.5839,
1822
+ "step": 259
1823
+ },
1824
+ {
1825
+ "epoch": 1.04,
1826
+ "grad_norm": 0.2674934886902205,
1827
+ "learning_rate": 2.670606033411678e-06,
1828
+ "loss": 0.5692,
1829
+ "step": 260
1830
+ },
1831
+ {
1832
+ "epoch": 1.044,
1833
+ "grad_norm": 0.2717961476035451,
1834
+ "learning_rate": 2.653568117101159e-06,
1835
+ "loss": 0.5586,
1836
+ "step": 261
1837
+ },
1838
+ {
1839
+ "epoch": 1.048,
1840
+ "grad_norm": 0.2886664885195019,
1841
+ "learning_rate": 2.636523037980275e-06,
1842
+ "loss": 0.5592,
1843
+ "step": 262
1844
+ },
1845
+ {
1846
+ "epoch": 1.052,
1847
+ "grad_norm": 0.2913975175671213,
1848
+ "learning_rate": 2.6194715910751806e-06,
1849
+ "loss": 0.5588,
1850
+ "step": 263
1851
+ },
1852
+ {
1853
+ "epoch": 1.056,
1854
+ "grad_norm": 0.26635335287134043,
1855
+ "learning_rate": 2.602414571709036e-06,
1856
+ "loss": 0.5518,
1857
+ "step": 264
1858
+ },
1859
+ {
1860
+ "epoch": 1.06,
1861
+ "grad_norm": 0.27589381073650543,
1862
+ "learning_rate": 2.58535277546492e-06,
1863
+ "loss": 0.5588,
1864
+ "step": 265
1865
+ },
1866
+ {
1867
+ "epoch": 1.064,
1868
+ "grad_norm": 0.26891839582782207,
1869
+ "learning_rate": 2.5682869981487154e-06,
1870
+ "loss": 0.5671,
1871
+ "step": 266
1872
+ },
1873
+ {
1874
+ "epoch": 1.068,
1875
+ "grad_norm": 0.2699654255248958,
1876
+ "learning_rate": 2.5512180357519913e-06,
1877
+ "loss": 0.5663,
1878
+ "step": 267
1879
+ },
1880
+ {
1881
+ "epoch": 1.072,
1882
+ "grad_norm": 0.27268944362101294,
1883
+ "learning_rate": 2.5341466844148775e-06,
1884
+ "loss": 0.5689,
1885
+ "step": 268
1886
+ },
1887
+ {
1888
+ "epoch": 1.076,
1889
+ "grad_norm": 0.2895176432362157,
1890
+ "learning_rate": 2.5170737403889334e-06,
1891
+ "loss": 0.555,
1892
+ "step": 269
1893
+ },
1894
+ {
1895
+ "epoch": 1.08,
1896
+ "grad_norm": 0.275834303493974,
1897
+ "learning_rate": 2.5e-06,
1898
+ "loss": 0.5652,
1899
+ "step": 270
1900
+ },
1901
+ {
1902
+ "epoch": 1.084,
1903
+ "grad_norm": 0.27109148773857455,
1904
+ "learning_rate": 2.4829262596110674e-06,
1905
+ "loss": 0.5531,
1906
+ "step": 271
1907
+ },
1908
+ {
1909
+ "epoch": 1.088,
1910
+ "grad_norm": 0.2885944970109634,
1911
+ "learning_rate": 2.465853315585123e-06,
1912
+ "loss": 0.5645,
1913
+ "step": 272
1914
+ },
1915
+ {
1916
+ "epoch": 1.092,
1917
+ "grad_norm": 0.3325900759228949,
1918
+ "learning_rate": 2.44878196424801e-06,
1919
+ "loss": 0.567,
1920
+ "step": 273
1921
+ },
1922
+ {
1923
+ "epoch": 1.096,
1924
+ "grad_norm": 0.2656725633978504,
1925
+ "learning_rate": 2.431713001851286e-06,
1926
+ "loss": 0.5645,
1927
+ "step": 274
1928
+ },
1929
+ {
1930
+ "epoch": 1.1,
1931
+ "grad_norm": 0.28515147451371026,
1932
+ "learning_rate": 2.4146472245350804e-06,
1933
+ "loss": 0.5627,
1934
+ "step": 275
1935
+ },
1936
+ {
1937
+ "epoch": 1.104,
1938
+ "grad_norm": 0.2772295325997671,
1939
+ "learning_rate": 2.3975854282909645e-06,
1940
+ "loss": 0.5606,
1941
+ "step": 276
1942
+ },
1943
+ {
1944
+ "epoch": 1.108,
1945
+ "grad_norm": 0.2732258740956759,
1946
+ "learning_rate": 2.3805284089248203e-06,
1947
+ "loss": 0.558,
1948
+ "step": 277
1949
+ },
1950
+ {
1951
+ "epoch": 1.112,
1952
+ "grad_norm": 0.2657630056993197,
1953
+ "learning_rate": 2.3634769620197253e-06,
1954
+ "loss": 0.5639,
1955
+ "step": 278
1956
+ },
1957
+ {
1958
+ "epoch": 1.116,
1959
+ "grad_norm": 0.2741360647437433,
1960
+ "learning_rate": 2.3464318828988416e-06,
1961
+ "loss": 0.539,
1962
+ "step": 279
1963
+ },
1964
+ {
1965
+ "epoch": 1.12,
1966
+ "grad_norm": 0.27924091679374063,
1967
+ "learning_rate": 2.3293939665883233e-06,
1968
+ "loss": 0.563,
1969
+ "step": 280
1970
+ },
1971
+ {
1972
+ "epoch": 1.124,
1973
+ "grad_norm": 0.2663368697744116,
1974
+ "learning_rate": 2.3123640077802305e-06,
1975
+ "loss": 0.5639,
1976
+ "step": 281
1977
+ },
1978
+ {
1979
+ "epoch": 1.1280000000000001,
1980
+ "grad_norm": 0.2635159398782791,
1981
+ "learning_rate": 2.2953428007954682e-06,
1982
+ "loss": 0.5225,
1983
+ "step": 282
1984
+ },
1985
+ {
1986
+ "epoch": 1.1320000000000001,
1987
+ "grad_norm": 0.26790514294280293,
1988
+ "learning_rate": 2.2783311395467304e-06,
1989
+ "loss": 0.5556,
1990
+ "step": 283
1991
+ },
1992
+ {
1993
+ "epoch": 1.1360000000000001,
1994
+ "grad_norm": 0.27007506595154834,
1995
+ "learning_rate": 2.261329817501475e-06,
1996
+ "loss": 0.545,
1997
+ "step": 284
1998
+ },
1999
+ {
2000
+ "epoch": 1.1400000000000001,
2001
+ "grad_norm": 0.27274462523361476,
2002
+ "learning_rate": 2.2443396276449145e-06,
2003
+ "loss": 0.5724,
2004
+ "step": 285
2005
+ },
2006
+ {
2007
+ "epoch": 1.144,
2008
+ "grad_norm": 0.25666672253669337,
2009
+ "learning_rate": 2.2273613624430256e-06,
2010
+ "loss": 0.541,
2011
+ "step": 286
2012
+ },
2013
+ {
2014
+ "epoch": 1.148,
2015
+ "grad_norm": 0.30831325626025924,
2016
+ "learning_rate": 2.2103958138055897e-06,
2017
+ "loss": 0.5544,
2018
+ "step": 287
2019
+ },
2020
+ {
2021
+ "epoch": 1.152,
2022
+ "grad_norm": 0.26679849744140743,
2023
+ "learning_rate": 2.1934437730492544e-06,
2024
+ "loss": 0.5544,
2025
+ "step": 288
2026
+ },
2027
+ {
2028
+ "epoch": 1.156,
2029
+ "grad_norm": 0.26795918133510693,
2030
+ "learning_rate": 2.1765060308606243e-06,
2031
+ "loss": 0.5726,
2032
+ "step": 289
2033
+ },
2034
+ {
2035
+ "epoch": 1.16,
2036
+ "grad_norm": 0.26692330671607206,
2037
+ "learning_rate": 2.159583377259384e-06,
2038
+ "loss": 0.5771,
2039
+ "step": 290
2040
+ },
2041
+ {
2042
+ "epoch": 1.164,
2043
+ "grad_norm": 0.26800012106006976,
2044
+ "learning_rate": 2.142676601561447e-06,
2045
+ "loss": 0.5376,
2046
+ "step": 291
2047
+ },
2048
+ {
2049
+ "epoch": 1.168,
2050
+ "grad_norm": 0.25520584565446575,
2051
+ "learning_rate": 2.1257864923421405e-06,
2052
+ "loss": 0.5439,
2053
+ "step": 292
2054
+ },
2055
+ {
2056
+ "epoch": 1.172,
2057
+ "grad_norm": 0.26220655980779767,
2058
+ "learning_rate": 2.1089138373994226e-06,
2059
+ "loss": 0.552,
2060
+ "step": 293
2061
+ },
2062
+ {
2063
+ "epoch": 1.176,
2064
+ "grad_norm": 0.2599229946929615,
2065
+ "learning_rate": 2.092059423717145e-06,
2066
+ "loss": 0.5591,
2067
+ "step": 294
2068
+ },
2069
+ {
2070
+ "epoch": 1.18,
2071
+ "grad_norm": 0.26226465656771997,
2072
+ "learning_rate": 2.0752240374283334e-06,
2073
+ "loss": 0.5424,
2074
+ "step": 295
2075
+ },
2076
+ {
2077
+ "epoch": 1.184,
2078
+ "grad_norm": 0.27097583397605235,
2079
+ "learning_rate": 2.0584084637785316e-06,
2080
+ "loss": 0.5613,
2081
+ "step": 296
2082
+ },
2083
+ {
2084
+ "epoch": 1.188,
2085
+ "grad_norm": 0.25487494718289816,
2086
+ "learning_rate": 2.0416134870891697e-06,
2087
+ "loss": 0.5531,
2088
+ "step": 297
2089
+ },
2090
+ {
2091
+ "epoch": 1.192,
2092
+ "grad_norm": 0.258627059695691,
2093
+ "learning_rate": 2.0248398907209827e-06,
2094
+ "loss": 0.5603,
2095
+ "step": 298
2096
+ },
2097
+ {
2098
+ "epoch": 1.196,
2099
+ "grad_norm": 0.27660938201914487,
2100
+ "learning_rate": 2.008088457037472e-06,
2101
+ "loss": 0.5648,
2102
+ "step": 299
2103
+ },
2104
+ {
2105
+ "epoch": 1.2,
2106
+ "grad_norm": 0.26596618229804614,
2107
+ "learning_rate": 1.991359967368416e-06,
2108
+ "loss": 0.574,
2109
+ "step": 300
2110
+ },
2111
+ {
2112
+ "epoch": 1.204,
2113
+ "grad_norm": 0.2618039991994125,
2114
+ "learning_rate": 1.9746552019734246e-06,
2115
+ "loss": 0.5492,
2116
+ "step": 301
2117
+ },
2118
+ {
2119
+ "epoch": 1.208,
2120
+ "grad_norm": 0.272654754048079,
2121
+ "learning_rate": 1.957974940005548e-06,
2122
+ "loss": 0.5767,
2123
+ "step": 302
2124
+ },
2125
+ {
2126
+ "epoch": 1.212,
2127
+ "grad_norm": 0.26671171090168844,
2128
+ "learning_rate": 1.9413199594749327e-06,
2129
+ "loss": 0.5338,
2130
+ "step": 303
2131
+ },
2132
+ {
2133
+ "epoch": 1.216,
2134
+ "grad_norm": 0.270832034752946,
2135
+ "learning_rate": 1.9246910372125345e-06,
2136
+ "loss": 0.5647,
2137
+ "step": 304
2138
+ },
2139
+ {
2140
+ "epoch": 1.22,
2141
+ "grad_norm": 0.2643013014532281,
2142
+ "learning_rate": 1.9080889488338833e-06,
2143
+ "loss": 0.5597,
2144
+ "step": 305
2145
+ },
2146
+ {
2147
+ "epoch": 1.224,
2148
+ "grad_norm": 0.27092629093151294,
2149
+ "learning_rate": 1.8915144687029107e-06,
2150
+ "loss": 0.5485,
2151
+ "step": 306
2152
+ },
2153
+ {
2154
+ "epoch": 1.228,
2155
+ "grad_norm": 0.26613235310331923,
2156
+ "learning_rate": 1.874968369895828e-06,
2157
+ "loss": 0.552,
2158
+ "step": 307
2159
+ },
2160
+ {
2161
+ "epoch": 1.232,
2162
+ "grad_norm": 0.2582406507610385,
2163
+ "learning_rate": 1.8584514241650667e-06,
2164
+ "loss": 0.5477,
2165
+ "step": 308
2166
+ },
2167
+ {
2168
+ "epoch": 1.236,
2169
+ "grad_norm": 0.2687239346512157,
2170
+ "learning_rate": 1.8419644019032868e-06,
2171
+ "loss": 0.5716,
2172
+ "step": 309
2173
+ },
2174
+ {
2175
+ "epoch": 1.24,
2176
+ "grad_norm": 0.2634130471010307,
2177
+ "learning_rate": 1.8255080721074391e-06,
2178
+ "loss": 0.5511,
2179
+ "step": 310
2180
+ },
2181
+ {
2182
+ "epoch": 1.244,
2183
+ "grad_norm": 0.27164814594203673,
2184
+ "learning_rate": 1.8090832023429022e-06,
2185
+ "loss": 0.5605,
2186
+ "step": 311
2187
+ },
2188
+ {
2189
+ "epoch": 1.248,
2190
+ "grad_norm": 0.26506994687071844,
2191
+ "learning_rate": 1.792690558707675e-06,
2192
+ "loss": 0.5522,
2193
+ "step": 312
2194
+ },
2195
+ {
2196
+ "epoch": 1.252,
2197
+ "grad_norm": 0.2710404134246902,
2198
+ "learning_rate": 1.7763309057966487e-06,
2199
+ "loss": 0.5754,
2200
+ "step": 313
2201
+ },
2202
+ {
2203
+ "epoch": 1.256,
2204
+ "grad_norm": 0.2644648635617096,
2205
+ "learning_rate": 1.7600050066659418e-06,
2206
+ "loss": 0.5542,
2207
+ "step": 314
2208
+ },
2209
+ {
2210
+ "epoch": 1.26,
2211
+ "grad_norm": 0.2563441049550618,
2212
+ "learning_rate": 1.7437136227973108e-06,
2213
+ "loss": 0.5428,
2214
+ "step": 315
2215
+ },
2216
+ {
2217
+ "epoch": 1.264,
2218
+ "grad_norm": 0.2781486176634604,
2219
+ "learning_rate": 1.7274575140626318e-06,
2220
+ "loss": 0.5469,
2221
+ "step": 316
2222
+ },
2223
+ {
2224
+ "epoch": 1.268,
2225
+ "grad_norm": 0.26498925525964123,
2226
+ "learning_rate": 1.7112374386884583e-06,
2227
+ "loss": 0.5671,
2228
+ "step": 317
2229
+ },
2230
+ {
2231
+ "epoch": 1.272,
2232
+ "grad_norm": 0.27752994625791155,
2233
+ "learning_rate": 1.695054153220655e-06,
2234
+ "loss": 0.5437,
2235
+ "step": 318
2236
+ },
2237
+ {
2238
+ "epoch": 1.276,
2239
+ "grad_norm": 0.2602835745087367,
2240
+ "learning_rate": 1.678908412489111e-06,
2241
+ "loss": 0.5602,
2242
+ "step": 319
2243
+ },
2244
+ {
2245
+ "epoch": 1.28,
2246
+ "grad_norm": 0.2665909274717665,
2247
+ "learning_rate": 1.6628009695725348e-06,
2248
+ "loss": 0.5762,
2249
+ "step": 320
2250
+ },
2251
+ {
2252
+ "epoch": 1.284,
2253
+ "grad_norm": 0.2531045714341802,
2254
+ "learning_rate": 1.6467325757633242e-06,
2255
+ "loss": 0.5674,
2256
+ "step": 321
2257
+ },
2258
+ {
2259
+ "epoch": 1.288,
2260
+ "grad_norm": 0.2621120701043234,
2261
+ "learning_rate": 1.630703980532528e-06,
2262
+ "loss": 0.5598,
2263
+ "step": 322
2264
+ },
2265
+ {
2266
+ "epoch": 1.292,
2267
+ "grad_norm": 0.275203318844759,
2268
+ "learning_rate": 1.6147159314948873e-06,
2269
+ "loss": 0.564,
2270
+ "step": 323
2271
+ },
2272
+ {
2273
+ "epoch": 1.296,
2274
+ "grad_norm": 0.2603786935372431,
2275
+ "learning_rate": 1.5987691743739636e-06,
2276
+ "loss": 0.5535,
2277
+ "step": 324
2278
+ },
2279
+ {
2280
+ "epoch": 1.3,
2281
+ "grad_norm": 0.26776057829072847,
2282
+ "learning_rate": 1.5828644529673592e-06,
2283
+ "loss": 0.5627,
2284
+ "step": 325
2285
+ },
2286
+ {
2287
+ "epoch": 1.304,
2288
+ "grad_norm": 0.2623048715119549,
2289
+ "learning_rate": 1.5670025091120219e-06,
2290
+ "loss": 0.5685,
2291
+ "step": 326
2292
+ },
2293
+ {
2294
+ "epoch": 1.308,
2295
+ "grad_norm": 0.26608080729860306,
2296
+ "learning_rate": 1.5511840826496462e-06,
2297
+ "loss": 0.5695,
2298
+ "step": 327
2299
+ },
2300
+ {
2301
+ "epoch": 1.312,
2302
+ "grad_norm": 0.2673157087455161,
2303
+ "learning_rate": 1.5354099113921614e-06,
2304
+ "loss": 0.5354,
2305
+ "step": 328
2306
+ },
2307
+ {
2308
+ "epoch": 1.316,
2309
+ "grad_norm": 0.2754778027664838,
2310
+ "learning_rate": 1.519680731087325e-06,
2311
+ "loss": 0.5705,
2312
+ "step": 329
2313
+ },
2314
+ {
2315
+ "epoch": 1.32,
2316
+ "grad_norm": 0.2697793809470272,
2317
+ "learning_rate": 1.5039972753843966e-06,
2318
+ "loss": 0.5748,
2319
+ "step": 330
2320
+ },
2321
+ {
2322
+ "epoch": 1.324,
2323
+ "grad_norm": 0.2737068013002947,
2324
+ "learning_rate": 1.488360275799926e-06,
2325
+ "loss": 0.5554,
2326
+ "step": 331
2327
+ },
2328
+ {
2329
+ "epoch": 1.328,
2330
+ "grad_norm": 0.2618826312103745,
2331
+ "learning_rate": 1.4727704616836297e-06,
2332
+ "loss": 0.5516,
2333
+ "step": 332
2334
+ },
2335
+ {
2336
+ "epoch": 1.332,
2337
+ "grad_norm": 0.2610637671077678,
2338
+ "learning_rate": 1.457228560184374e-06,
2339
+ "loss": 0.5491,
2340
+ "step": 333
2341
+ },
2342
+ {
2343
+ "epoch": 1.336,
2344
+ "grad_norm": 0.2615954274949455,
2345
+ "learning_rate": 1.441735296216256e-06,
2346
+ "loss": 0.5556,
2347
+ "step": 334
2348
+ },
2349
+ {
2350
+ "epoch": 1.34,
2351
+ "grad_norm": 0.2650842665446321,
2352
+ "learning_rate": 1.4262913924247956e-06,
2353
+ "loss": 0.5475,
2354
+ "step": 335
2355
+ },
2356
+ {
2357
+ "epoch": 1.3439999999999999,
2358
+ "grad_norm": 0.25585711289460494,
2359
+ "learning_rate": 1.4108975691532273e-06,
2360
+ "loss": 0.5549,
2361
+ "step": 336
2362
+ },
2363
+ {
2364
+ "epoch": 1.3479999999999999,
2365
+ "grad_norm": 0.25250951856873494,
2366
+ "learning_rate": 1.3955545444089017e-06,
2367
+ "loss": 0.5601,
2368
+ "step": 337
2369
+ },
2370
+ {
2371
+ "epoch": 1.3519999999999999,
2372
+ "grad_norm": 0.2569876765702743,
2373
+ "learning_rate": 1.3802630338297956e-06,
2374
+ "loss": 0.548,
2375
+ "step": 338
2376
+ },
2377
+ {
2378
+ "epoch": 1.3559999999999999,
2379
+ "grad_norm": 0.28203813821342777,
2380
+ "learning_rate": 1.3650237506511333e-06,
2381
+ "loss": 0.566,
2382
+ "step": 339
2383
+ },
2384
+ {
2385
+ "epoch": 1.3599999999999999,
2386
+ "grad_norm": 0.27081394807954995,
2387
+ "learning_rate": 1.3498374056721198e-06,
2388
+ "loss": 0.5594,
2389
+ "step": 340
2390
+ },
2391
+ {
2392
+ "epoch": 1.3639999999999999,
2393
+ "grad_norm": 0.26811473148644754,
2394
+ "learning_rate": 1.334704707222787e-06,
2395
+ "loss": 0.5558,
2396
+ "step": 341
2397
+ },
2398
+ {
2399
+ "epoch": 1.3679999999999999,
2400
+ "grad_norm": 0.2635422950339071,
2401
+ "learning_rate": 1.3196263611309539e-06,
2402
+ "loss": 0.5707,
2403
+ "step": 342
2404
+ },
2405
+ {
2406
+ "epoch": 1.3719999999999999,
2407
+ "grad_norm": 0.25640933129102816,
2408
+ "learning_rate": 1.3046030706893079e-06,
2409
+ "loss": 0.5551,
2410
+ "step": 343
2411
+ },
2412
+ {
2413
+ "epoch": 1.376,
2414
+ "grad_norm": 0.26144553856379565,
2415
+ "learning_rate": 1.2896355366226e-06,
2416
+ "loss": 0.5689,
2417
+ "step": 344
2418
+ },
2419
+ {
2420
+ "epoch": 1.38,
2421
+ "grad_norm": 0.24921356601339556,
2422
+ "learning_rate": 1.2747244570549578e-06,
2423
+ "loss": 0.5223,
2424
+ "step": 345
2425
+ },
2426
+ {
2427
+ "epoch": 1.384,
2428
+ "grad_norm": 0.2739025094336896,
2429
+ "learning_rate": 1.2598705274773299e-06,
2430
+ "loss": 0.5553,
2431
+ "step": 346
2432
+ },
2433
+ {
2434
+ "epoch": 1.388,
2435
+ "grad_norm": 0.26711249323543973,
2436
+ "learning_rate": 1.2450744407150427e-06,
2437
+ "loss": 0.5411,
2438
+ "step": 347
2439
+ },
2440
+ {
2441
+ "epoch": 1.392,
2442
+ "grad_norm": 0.2695324913604517,
2443
+ "learning_rate": 1.2303368868954848e-06,
2444
+ "loss": 0.567,
2445
+ "step": 348
2446
+ },
2447
+ {
2448
+ "epoch": 1.396,
2449
+ "grad_norm": 0.25781431882124334,
2450
+ "learning_rate": 1.215658553415918e-06,
2451
+ "loss": 0.5674,
2452
+ "step": 349
2453
+ },
2454
+ {
2455
+ "epoch": 1.4,
2456
+ "grad_norm": 0.2594657444454727,
2457
+ "learning_rate": 1.2010401249114166e-06,
2458
+ "loss": 0.5366,
2459
+ "step": 350
2460
+ },
2461
+ {
2462
+ "epoch": 1.404,
2463
+ "grad_norm": 0.26265811514812376,
2464
+ "learning_rate": 1.1864822832229319e-06,
2465
+ "loss": 0.5465,
2466
+ "step": 351
2467
+ },
2468
+ {
2469
+ "epoch": 1.408,
2470
+ "grad_norm": 0.26623313611028854,
2471
+ "learning_rate": 1.1719857073654923e-06,
2472
+ "loss": 0.564,
2473
+ "step": 352
2474
+ },
2475
+ {
2476
+ "epoch": 1.412,
2477
+ "grad_norm": 0.2702849646886806,
2478
+ "learning_rate": 1.1575510734965305e-06,
2479
+ "loss": 0.5521,
2480
+ "step": 353
2481
+ },
2482
+ {
2483
+ "epoch": 1.416,
2484
+ "grad_norm": 0.2689048371097966,
2485
+ "learning_rate": 1.1431790548843464e-06,
2486
+ "loss": 0.5527,
2487
+ "step": 354
2488
+ },
2489
+ {
2490
+ "epoch": 1.42,
2491
+ "grad_norm": 0.25819241256540093,
2492
+ "learning_rate": 1.1288703218767027e-06,
2493
+ "loss": 0.54,
2494
+ "step": 355
2495
+ },
2496
+ {
2497
+ "epoch": 1.424,
2498
+ "grad_norm": 0.25437896341243765,
2499
+ "learning_rate": 1.1146255418695635e-06,
2500
+ "loss": 0.528,
2501
+ "step": 356
2502
+ },
2503
+ {
2504
+ "epoch": 1.428,
2505
+ "grad_norm": 0.25406879267543436,
2506
+ "learning_rate": 1.1004453792759547e-06,
2507
+ "loss": 0.5471,
2508
+ "step": 357
2509
+ },
2510
+ {
2511
+ "epoch": 1.432,
2512
+ "grad_norm": 0.2562241134177116,
2513
+ "learning_rate": 1.0863304954949856e-06,
2514
+ "loss": 0.55,
2515
+ "step": 358
2516
+ },
2517
+ {
2518
+ "epoch": 1.436,
2519
+ "grad_norm": 0.26542441620839063,
2520
+ "learning_rate": 1.072281548880992e-06,
2521
+ "loss": 0.5678,
2522
+ "step": 359
2523
+ },
2524
+ {
2525
+ "epoch": 1.44,
2526
+ "grad_norm": 0.269318418298225,
2527
+ "learning_rate": 1.0582991947128324e-06,
2528
+ "loss": 0.5615,
2529
+ "step": 360
2530
+ },
2531
+ {
2532
+ "epoch": 1.444,
2533
+ "grad_norm": 0.2691764014021926,
2534
+ "learning_rate": 1.0443840851633227e-06,
2535
+ "loss": 0.565,
2536
+ "step": 361
2537
+ },
2538
+ {
2539
+ "epoch": 1.448,
2540
+ "grad_norm": 0.2594862635555543,
2541
+ "learning_rate": 1.0305368692688175e-06,
2542
+ "loss": 0.5666,
2543
+ "step": 362
2544
+ },
2545
+ {
2546
+ "epoch": 1.452,
2547
+ "grad_norm": 0.25315547667760335,
2548
+ "learning_rate": 1.0167581928989373e-06,
2549
+ "loss": 0.5555,
2550
+ "step": 363
2551
+ },
2552
+ {
2553
+ "epoch": 1.456,
2554
+ "grad_norm": 0.2613895044115319,
2555
+ "learning_rate": 1.0030486987264436e-06,
2556
+ "loss": 0.5776,
2557
+ "step": 364
2558
+ },
2559
+ {
2560
+ "epoch": 1.46,
2561
+ "grad_norm": 0.2631431546714004,
2562
+ "learning_rate": 9.89409026197264e-07,
2563
+ "loss": 0.5365,
2564
+ "step": 365
2565
+ },
2566
+ {
2567
+ "epoch": 1.464,
2568
+ "grad_norm": 0.2647751182203821,
2569
+ "learning_rate": 9.758398115006637e-07,
2570
+ "loss": 0.5573,
2571
+ "step": 366
2572
+ },
2573
+ {
2574
+ "epoch": 1.468,
2575
+ "grad_norm": 0.26208143115660865,
2576
+ "learning_rate": 9.623416875395763e-07,
2577
+ "loss": 0.5605,
2578
+ "step": 367
2579
+ },
2580
+ {
2581
+ "epoch": 1.472,
2582
+ "grad_norm": 0.2704474380702507,
2583
+ "learning_rate": 9.489152839010799e-07,
2584
+ "loss": 0.5697,
2585
+ "step": 368
2586
+ },
2587
+ {
2588
+ "epoch": 1.476,
2589
+ "grad_norm": 0.275761932132816,
2590
+ "learning_rate": 9.355612268270339e-07,
2591
+ "loss": 0.5586,
2592
+ "step": 369
2593
+ },
2594
+ {
2595
+ "epoch": 1.48,
2596
+ "grad_norm": 0.27242480864427504,
2597
+ "learning_rate": 9.222801391848688e-07,
2598
+ "loss": 0.5573,
2599
+ "step": 370
2600
+ },
2601
+ {
2602
+ "epoch": 1.484,
2603
+ "grad_norm": 0.254674372509468,
2604
+ "learning_rate": 9.090726404385319e-07,
2605
+ "loss": 0.5377,
2606
+ "step": 371
2607
+ },
2608
+ {
2609
+ "epoch": 1.488,
2610
+ "grad_norm": 0.25888213496864965,
2611
+ "learning_rate": 8.959393466195973e-07,
2612
+ "loss": 0.5278,
2613
+ "step": 372
2614
+ },
2615
+ {
2616
+ "epoch": 1.492,
2617
+ "grad_norm": 0.2527033545759895,
2618
+ "learning_rate": 8.828808702985325e-07,
2619
+ "loss": 0.5475,
2620
+ "step": 373
2621
+ },
2622
+ {
2623
+ "epoch": 1.496,
2624
+ "grad_norm": 0.2669973822070058,
2625
+ "learning_rate": 8.69897820556124e-07,
2626
+ "loss": 0.5652,
2627
+ "step": 374
2628
+ },
2629
+ {
2630
+ "epoch": 1.5,
2631
+ "grad_norm": 0.26903334117277744,
2632
+ "learning_rate": 8.569908029550686e-07,
2633
+ "loss": 0.5555,
2634
+ "step": 375
2635
+ }
2636
+ ],
2637
+ "logging_steps": 1,
2638
+ "max_steps": 500,
2639
+ "num_input_tokens_seen": 0,
2640
+ "num_train_epochs": 2,
2641
+ "save_steps": 125,
2642
+ "stateful_callbacks": {
2643
+ "TrainerControl": {
2644
+ "args": {
2645
+ "should_epoch_stop": false,
2646
+ "should_evaluate": false,
2647
+ "should_log": false,
2648
+ "should_save": true,
2649
+ "should_training_stop": false
2650
+ },
2651
+ "attributes": {}
2652
+ }
2653
+ },
2654
+ "total_flos": 4.6053644267893555e+17,
2655
+ "train_batch_size": 1,
2656
+ "trial_name": null,
2657
+ "trial_params": null
2658
+ }
checkpoint-375/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e091b32b97830aabb8c79a574506925e3fa58e7035cd23f0d221a73e0d787a76
+ size 8440
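The trainer_state.json added above follows the standard Hugging Face Trainer layout: a "log_history" list with one record per logged step (epoch, grad_norm, learning_rate, loss, step), followed by run-level fields such as "max_steps" and "save_steps". As a minimal sketch of how one might sanity-check this checkpoint after downloading the repository (the relative path and the choice of summary statistics are illustrative assumptions, not part of this commit):

import json

# Load the trainer state saved with this checkpoint; adjust the path to
# wherever the repository was downloaded (assumed layout).
with open("checkpoint-375/trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step records that carry a loss, mirroring the diff above.
logged = [h for h in state["log_history"] if "loss" in h]

print(f"last logged step: {logged[-1]['step']} of {state['max_steps']}")
print(f"final learning rate: {logged[-1]['learning_rate']:.3e}")
tail = [h["loss"] for h in logged[-25:]]
print(f"mean loss over last {len(tail)} logged steps: {sum(tail)/len(tail):.4f}")

Run against this checkpoint, it should report step 375 of 500 and the ~8.570e-07 learning rate visible in the final record above.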