jeiku commited on
Commit
8d5284d
1 Parent(s): 2eecfae

Upload folder using huggingface_hub

Browse files
README.md ADDED
@@ -0,0 +1,179 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ library_name: transformers
3
+ license: apache-2.0
4
+ base_model: FourOhFour/Vapor_7B
5
+ tags:
6
+ - generated_from_trainer
7
+ model-index:
8
+ - name: outputs/out
9
+ results: []
10
+ ---
11
+
12
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
13
+ should probably proofread and complete it, then remove this comment. -->
14
+
15
+ [<img src="https://raw.githubusercontent.com/axolotl-ai-cloud/axolotl/main/image/axolotl-badge-web.png" alt="Built with Axolotl" width="200" height="32"/>](https://github.com/axolotl-ai-cloud/axolotl)
16
+ <details><summary>See axolotl config</summary>
17
+
18
+ axolotl version: `0.4.1`
19
+ ```yaml
20
+ base_model: FourOhFour/Vapor_7B
21
+ model_type: AutoModelForCausalLM
22
+ tokenizer_type: AutoTokenizer
23
+
24
+ load_in_8bit: false
25
+ load_in_4bit: false
26
+ strict: false
27
+
28
+ datasets:
29
+ - path: anthracite-org/stheno-filtered-v1.1
30
+ type: sharegpt
31
+ conversation: chatml
32
+ - path: Epiculous/SynthRP-Gens-v1.1-Filtered-n-Cleaned
33
+ type: sharegpt
34
+ conversation: chatml
35
+ - path: ResplendentAI/bluemoon
36
+ type: sharegpt
37
+ conversation: chatml
38
+ - path: openerotica/freedom-rp
39
+ type: sharegpt
40
+ conversation: chatml
41
+ - path: anthracite-org/nopm_claude_writing_fixed
42
+ type: sharegpt
43
+ conversation: chatml
44
+ - path: MinervaAI/Aesir-Preview
45
+ type: sharegpt
46
+ conversation: chatml
47
+ - path: NewEden/c2-prefixed
48
+ type: sharegpt
49
+ conversation: chatml
50
+
51
+ chat_template: chatml
52
+
53
+ val_set_size: 0.01
54
+ output_dir: ./outputs/out
55
+
56
+ adapter:
57
+ lora_r:
58
+ lora_alpha:
59
+ lora_dropout:
60
+ lora_target_linear:
61
+
62
+ sequence_len: 8192
63
+ # sequence_len: 32768
64
+ sample_packing: true
65
+ eval_sample_packing: false
66
+ pad_to_sequence_len: true
67
+
68
+ plugins:
69
+ - axolotl.integrations.liger.LigerPlugin
70
+ liger_rope: true
71
+ liger_rms_norm: true
72
+ liger_swiglu: true
73
+ liger_fused_linear_cross_entropy: true
74
+
75
+ wandb_project: smoke7B
76
+ wandb_entity:
77
+ wandb_watch:
78
+ wandb_name: smoke7B
79
+ wandb_log_model:
80
+
81
+ gradient_accumulation_steps: 32
82
+ micro_batch_size: 1
83
+ num_epochs: 2
84
+ optimizer: adamw_bnb_8bit
85
+ lr_scheduler: cosine
86
+ learning_rate: 0.00001
87
+ weight_decay: 0.05
88
+
89
+ train_on_inputs: false
90
+ group_by_length: false
91
+ bf16: auto
92
+ fp16:
93
+ tf32: true
94
+
95
+ gradient_checkpointing: true
96
+ early_stopping_patience:
97
+ resume_from_checkpoint: /workspace/axolotl/outputs/out/checkpoint-137
98
+ auto_resume_from_checkpoints: true
99
+ local_rank:
100
+ logging_steps: 1
101
+ xformers_attention:
102
+ flash_attention: true
103
+
104
+ warmup_ratio: 0.1
105
+ evals_per_epoch: 4
106
+ eval_table_size:
107
+ eval_max_new_tokens: 128
108
+ saves_per_epoch: 2
109
+
110
+ debug:
111
+ deepspeed:
112
+ fsdp:
113
+ fsdp_config:
114
+
115
+ special_tokens:
116
+ pad_token: <pad>
117
+
118
+
119
+ ```
120
+
121
+ </details><br>
122
+
123
+ # outputs/out
124
+
125
+ This model is a fine-tuned version of [FourOhFour/Vapor_7B](https://huggingface.co/FourOhFour/Vapor_7B) on the None dataset.
126
+ It achieves the following results on the evaluation set:
127
+ - Loss: 1.4023
128
+
129
+ ## Model description
130
+
131
+ More information needed
132
+
133
+ ## Intended uses & limitations
134
+
135
+ More information needed
136
+
137
+ ## Training and evaluation data
138
+
139
+ More information needed
140
+
141
+ ## Training procedure
142
+
143
+ ### Training hyperparameters
144
+
145
+ The following hyperparameters were used during training:
146
+ - learning_rate: 1e-05
147
+ - train_batch_size: 1
148
+ - eval_batch_size: 1
149
+ - seed: 42
150
+ - distributed_type: multi-GPU
151
+ - num_devices: 2
152
+ - gradient_accumulation_steps: 32
153
+ - total_train_batch_size: 64
154
+ - total_eval_batch_size: 2
155
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
156
+ - lr_scheduler_type: cosine
157
+ - lr_scheduler_warmup_steps: 54
158
+ - num_epochs: 2
159
+
160
+ ### Training results
161
+
162
+ | Training Loss | Epoch | Step | Validation Loss |
163
+ |:-------------:|:------:|:----:|:---------------:|
164
+ | 1.539 | 0.0037 | 1 | 1.5559 |
165
+ | 1.562 | 0.2528 | 69 | 1.4611 |
166
+ | 1.4928 | 0.5056 | 138 | 1.4304 |
167
+ | 1.4968 | 0.7583 | 207 | 1.4155 |
168
+ | 1.4817 | 1.0108 | 276 | 1.4075 |
169
+ | 1.4637 | 1.2640 | 345 | 1.4038 |
170
+ | 1.4701 | 1.5171 | 414 | 1.4026 |
171
+ | 1.4657 | 1.7703 | 483 | 1.4023 |
172
+
173
+
174
+ ### Framework versions
175
+
176
+ - Transformers 4.45.0.dev0
177
+ - Pytorch 2.4.0+cu121
178
+ - Datasets 2.21.0
179
+ - Tokenizers 0.19.1
added_tokens.json ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "</tool_call>": 151658,
3
+ "<pad>": 151665,
4
+ "<tool_call>": 151657,
5
+ "<|box_end|>": 151649,
6
+ "<|box_start|>": 151648,
7
+ "<|endoftext|>": 151643,
8
+ "<|file_sep|>": 151664,
9
+ "<|fim_middle|>": 151660,
10
+ "<|fim_pad|>": 151662,
11
+ "<|fim_prefix|>": 151659,
12
+ "<|fim_suffix|>": 151661,
13
+ "<|im_end|>": 151645,
14
+ "<|im_start|>": 151644,
15
+ "<|image_pad|>": 151655,
16
+ "<|object_ref_end|>": 151647,
17
+ "<|object_ref_start|>": 151646,
18
+ "<|quad_end|>": 151651,
19
+ "<|quad_start|>": 151650,
20
+ "<|repo_name|>": 151663,
21
+ "<|video_pad|>": 151656,
22
+ "<|vision_end|>": 151653,
23
+ "<|vision_pad|>": 151654,
24
+ "<|vision_start|>": 151652
25
+ }
config.json ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "FourOhFour/Vapor_7B",
3
+ "architectures": [
4
+ "Qwen2ForCausalLM"
5
+ ],
6
+ "attention_dropout": 0.0,
7
+ "eos_token_id": 151645,
8
+ "hidden_act": "silu",
9
+ "hidden_size": 3584,
10
+ "initializer_range": 0.02,
11
+ "intermediate_size": 18944,
12
+ "max_position_embeddings": 131072,
13
+ "max_window_layers": 28,
14
+ "model_type": "qwen2",
15
+ "num_attention_heads": 28,
16
+ "num_hidden_layers": 28,
17
+ "num_key_value_heads": 4,
18
+ "rms_norm_eps": 1e-06,
19
+ "rope_scaling": null,
20
+ "rope_theta": 1000000.0,
21
+ "sliding_window": null,
22
+ "tie_word_embeddings": false,
23
+ "torch_dtype": "bfloat16",
24
+ "transformers_version": "4.45.0.dev0",
25
+ "use_cache": false,
26
+ "use_mrope": false,
27
+ "use_sliding_window": false,
28
+ "vocab_size": 152064
29
+ }
generation_config.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token_id": 151643,
3
+ "do_sample": true,
4
+ "eos_token_id": 151643,
5
+ "max_new_tokens": 2048,
6
+ "transformers_version": "4.45.0.dev0"
7
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:67c7f5676a5ceee32f391e89de2c640cd0a90ff6e1565a4b64bf76343eaf525c
3
+ size 4877660776
model-00002-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d3689970c531613dadc77b3b8face7971209cbe945d50ef2637e28cb5b2e98a7
3
+ size 4932751008
model-00003-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:59d86027ae9023bc9dcc996a3686c3f2627bfb07e1992439d05e27499cc4b3f2
3
+ size 4330865200
model-00004-of-00004.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:db06fb89f0c58dea8415c80807be4faba7a250c809460c4d793e1fe56470c653
3
+ size 1089994880
model.safetensors.index.json ADDED
@@ -0,0 +1,346 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "metadata": {
3
+ "total_size": 15231233024
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00004-of-00004.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00004.safetensors",
8
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors",
9
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
10
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
11
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
12
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
13
+ "model.layers.0.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
14
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
15
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
16
+ "model.layers.0.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
17
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
18
+ "model.layers.0.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
19
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
20
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors",
21
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
22
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
23
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
24
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
25
+ "model.layers.1.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
26
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
27
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
28
+ "model.layers.1.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
29
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
30
+ "model.layers.1.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
31
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
32
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors",
33
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
34
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
35
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
36
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
37
+ "model.layers.10.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
38
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
39
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
40
+ "model.layers.10.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
41
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
42
+ "model.layers.10.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
43
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
44
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors",
45
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
46
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
47
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
48
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
49
+ "model.layers.11.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
50
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
51
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
52
+ "model.layers.11.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
53
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
54
+ "model.layers.11.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
55
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
56
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors",
57
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
58
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
59
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
60
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
61
+ "model.layers.12.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
62
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
63
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
64
+ "model.layers.12.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
65
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
66
+ "model.layers.12.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
67
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
68
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors",
69
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
70
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
71
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
72
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
73
+ "model.layers.13.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
74
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
75
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
76
+ "model.layers.13.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
77
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
78
+ "model.layers.13.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
79
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
80
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors",
81
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
82
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
83
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
84
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
85
+ "model.layers.14.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
86
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
87
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
88
+ "model.layers.14.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
89
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
90
+ "model.layers.14.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
91
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
92
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors",
93
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
94
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
95
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
96
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
97
+ "model.layers.15.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
98
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
99
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
100
+ "model.layers.15.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
101
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
102
+ "model.layers.15.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
103
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
104
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors",
105
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
106
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
107
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
108
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
109
+ "model.layers.16.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
110
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
111
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
112
+ "model.layers.16.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
113
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
114
+ "model.layers.16.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
115
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
116
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors",
117
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
118
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
119
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
120
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
121
+ "model.layers.17.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
122
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
123
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
124
+ "model.layers.17.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
125
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
126
+ "model.layers.17.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
127
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
128
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00004.safetensors",
129
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
130
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
131
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
132
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
133
+ "model.layers.18.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
134
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
135
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
136
+ "model.layers.18.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
137
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
138
+ "model.layers.18.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
139
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
140
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00004.safetensors",
141
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
142
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
143
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
144
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
145
+ "model.layers.19.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
146
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
147
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
148
+ "model.layers.19.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
149
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
150
+ "model.layers.19.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
151
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
152
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors",
153
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
154
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
155
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
156
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
157
+ "model.layers.2.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
158
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
159
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
160
+ "model.layers.2.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
161
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
162
+ "model.layers.2.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
163
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
164
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors",
165
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
166
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
167
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
168
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
169
+ "model.layers.20.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
170
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
171
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
172
+ "model.layers.20.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
173
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
174
+ "model.layers.20.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
175
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
176
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors",
177
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
178
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
179
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
180
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
181
+ "model.layers.21.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
182
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
183
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
184
+ "model.layers.21.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
185
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
186
+ "model.layers.21.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
187
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
188
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors",
189
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
190
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
191
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
192
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
193
+ "model.layers.22.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
194
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
195
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
196
+ "model.layers.22.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
197
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
198
+ "model.layers.22.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
199
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
200
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors",
201
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
202
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
203
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
204
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
205
+ "model.layers.23.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
206
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
207
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
208
+ "model.layers.23.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
209
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
210
+ "model.layers.23.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
211
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
212
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors",
213
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
214
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
215
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
216
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
217
+ "model.layers.24.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
218
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
219
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
220
+ "model.layers.24.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
221
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
222
+ "model.layers.24.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
223
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
224
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors",
225
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
226
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
227
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
228
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
229
+ "model.layers.25.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
230
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
231
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
232
+ "model.layers.25.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
233
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
234
+ "model.layers.25.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
235
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
236
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors",
237
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
238
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
239
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
240
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
241
+ "model.layers.26.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
242
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
243
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
244
+ "model.layers.26.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
245
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
246
+ "model.layers.26.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
247
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
248
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors",
249
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors",
250
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors",
251
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors",
252
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors",
253
+ "model.layers.27.self_attn.k_proj.bias": "model-00003-of-00004.safetensors",
254
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors",
255
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors",
256
+ "model.layers.27.self_attn.q_proj.bias": "model-00003-of-00004.safetensors",
257
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors",
258
+ "model.layers.27.self_attn.v_proj.bias": "model-00003-of-00004.safetensors",
259
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors",
260
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors",
261
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
262
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
263
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
264
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
265
+ "model.layers.3.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
266
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
267
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
268
+ "model.layers.3.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
269
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
270
+ "model.layers.3.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
271
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
272
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors",
273
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
274
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
275
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
276
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
277
+ "model.layers.4.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
278
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
279
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
280
+ "model.layers.4.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
281
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
282
+ "model.layers.4.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
283
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
284
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors",
285
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
286
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
287
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
288
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
289
+ "model.layers.5.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
290
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
291
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
292
+ "model.layers.5.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
293
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
294
+ "model.layers.5.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
295
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
296
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors",
297
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
298
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
299
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
300
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
301
+ "model.layers.6.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
302
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
303
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
304
+ "model.layers.6.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
305
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
306
+ "model.layers.6.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
307
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
308
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00004.safetensors",
309
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00004.safetensors",
310
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors",
311
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors",
312
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00004.safetensors",
313
+ "model.layers.7.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
314
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
315
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
316
+ "model.layers.7.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
317
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
318
+ "model.layers.7.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
319
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
320
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors",
321
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
322
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
323
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
324
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
325
+ "model.layers.8.self_attn.k_proj.bias": "model-00001-of-00004.safetensors",
326
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00004.safetensors",
327
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00004.safetensors",
328
+ "model.layers.8.self_attn.q_proj.bias": "model-00001-of-00004.safetensors",
329
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00004.safetensors",
330
+ "model.layers.8.self_attn.v_proj.bias": "model-00001-of-00004.safetensors",
331
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00004.safetensors",
332
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors",
333
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors",
334
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors",
335
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors",
336
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors",
337
+ "model.layers.9.self_attn.k_proj.bias": "model-00002-of-00004.safetensors",
338
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors",
339
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors",
340
+ "model.layers.9.self_attn.q_proj.bias": "model-00002-of-00004.safetensors",
341
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors",
342
+ "model.layers.9.self_attn.v_proj.bias": "model-00002-of-00004.safetensors",
343
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors",
344
+ "model.norm.weight": "model-00003-of-00004.safetensors"
345
+ }
346
+ }
special_tokens_map.json ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|im_start|>",
4
+ "<|im_end|>",
5
+ "<|object_ref_start|>",
6
+ "<|object_ref_end|>",
7
+ "<|box_start|>",
8
+ "<|box_end|>",
9
+ "<|quad_start|>",
10
+ "<|quad_end|>",
11
+ "<|vision_start|>",
12
+ "<|vision_end|>",
13
+ "<|vision_pad|>",
14
+ "<|image_pad|>",
15
+ "<|video_pad|>"
16
+ ],
17
+ "eos_token": {
18
+ "content": "<|im_end|>",
19
+ "lstrip": false,
20
+ "normalized": false,
21
+ "rstrip": false,
22
+ "single_word": false
23
+ },
24
+ "pad_token": {
25
+ "content": "<pad>",
26
+ "lstrip": false,
27
+ "normalized": false,
28
+ "rstrip": false,
29
+ "single_word": false
30
+ }
31
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,215 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ },
181
+ "151665": {
182
+ "content": "<pad>",
183
+ "lstrip": false,
184
+ "normalized": false,
185
+ "rstrip": false,
186
+ "single_word": false,
187
+ "special": true
188
+ }
189
+ },
190
+ "additional_special_tokens": [
191
+ "<|im_start|>",
192
+ "<|im_end|>",
193
+ "<|object_ref_start|>",
194
+ "<|object_ref_end|>",
195
+ "<|box_start|>",
196
+ "<|box_end|>",
197
+ "<|quad_start|>",
198
+ "<|quad_end|>",
199
+ "<|vision_start|>",
200
+ "<|vision_end|>",
201
+ "<|vision_pad|>",
202
+ "<|image_pad|>",
203
+ "<|video_pad|>"
204
+ ],
205
+ "bos_token": null,
206
+ "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
207
+ "clean_up_tokenization_spaces": false,
208
+ "eos_token": "<|im_end|>",
209
+ "errors": "replace",
210
+ "model_max_length": 131072,
211
+ "pad_token": "<pad>",
212
+ "split_special_tokens": false,
213
+ "tokenizer_class": "Qwen2Tokenizer",
214
+ "unk_token": null
215
+ }
trainer_state.json ADDED
@@ -0,0 +1,3919 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 2.0014906547414286,
5
+ "eval_steps": 69,
6
+ "global_step": 546,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.003663423010875787,
13
+ "grad_norm": 1.5390625,
14
+ "learning_rate": 1.8518518518518518e-07,
15
+ "loss": 1.539,
16
+ "step": 1
17
+ },
18
+ {
19
+ "epoch": 0.003663423010875787,
20
+ "eval_loss": 1.5559252500534058,
21
+ "eval_runtime": 86.4411,
22
+ "eval_samples_per_second": 8.966,
23
+ "eval_steps_per_second": 4.489,
24
+ "step": 1
25
+ },
26
+ {
27
+ "epoch": 0.007326846021751574,
28
+ "grad_norm": 1.6796875,
29
+ "learning_rate": 3.7037037037037036e-07,
30
+ "loss": 1.6519,
31
+ "step": 2
32
+ },
33
+ {
34
+ "epoch": 0.010990269032627361,
35
+ "grad_norm": 1.6015625,
36
+ "learning_rate": 5.555555555555555e-07,
37
+ "loss": 1.6261,
38
+ "step": 3
39
+ },
40
+ {
41
+ "epoch": 0.014653692043503148,
42
+ "grad_norm": 1.5625,
43
+ "learning_rate": 7.407407407407407e-07,
44
+ "loss": 1.6201,
45
+ "step": 4
46
+ },
47
+ {
48
+ "epoch": 0.018317115054378934,
49
+ "grad_norm": 1.5078125,
50
+ "learning_rate": 9.259259259259259e-07,
51
+ "loss": 1.6406,
52
+ "step": 5
53
+ },
54
+ {
55
+ "epoch": 0.021980538065254723,
56
+ "grad_norm": 1.484375,
57
+ "learning_rate": 1.111111111111111e-06,
58
+ "loss": 1.6432,
59
+ "step": 6
60
+ },
61
+ {
62
+ "epoch": 0.025643961076130508,
63
+ "grad_norm": 1.5,
64
+ "learning_rate": 1.2962962962962962e-06,
65
+ "loss": 1.6198,
66
+ "step": 7
67
+ },
68
+ {
69
+ "epoch": 0.029307384087006297,
70
+ "grad_norm": 1.4453125,
71
+ "learning_rate": 1.4814814814814815e-06,
72
+ "loss": 1.6053,
73
+ "step": 8
74
+ },
75
+ {
76
+ "epoch": 0.03297080709788208,
77
+ "grad_norm": 1.4609375,
78
+ "learning_rate": 1.6666666666666667e-06,
79
+ "loss": 1.5996,
80
+ "step": 9
81
+ },
82
+ {
83
+ "epoch": 0.03663423010875787,
84
+ "grad_norm": 1.359375,
85
+ "learning_rate": 1.8518518518518519e-06,
86
+ "loss": 1.5756,
87
+ "step": 10
88
+ },
89
+ {
90
+ "epoch": 0.04029765311963366,
91
+ "grad_norm": 1.3046875,
92
+ "learning_rate": 2.037037037037037e-06,
93
+ "loss": 1.6125,
94
+ "step": 11
95
+ },
96
+ {
97
+ "epoch": 0.043961076130509445,
98
+ "grad_norm": 1.296875,
99
+ "learning_rate": 2.222222222222222e-06,
100
+ "loss": 1.6129,
101
+ "step": 12
102
+ },
103
+ {
104
+ "epoch": 0.04762449914138523,
105
+ "grad_norm": 1.265625,
106
+ "learning_rate": 2.4074074074074075e-06,
107
+ "loss": 1.6614,
108
+ "step": 13
109
+ },
110
+ {
111
+ "epoch": 0.051287922152261016,
112
+ "grad_norm": 1.5546875,
113
+ "learning_rate": 2.5925925925925925e-06,
114
+ "loss": 1.5732,
115
+ "step": 14
116
+ },
117
+ {
118
+ "epoch": 0.05495134516313681,
119
+ "grad_norm": 1.2578125,
120
+ "learning_rate": 2.7777777777777783e-06,
121
+ "loss": 1.6096,
122
+ "step": 15
123
+ },
124
+ {
125
+ "epoch": 0.058614768174012594,
126
+ "grad_norm": 1.109375,
127
+ "learning_rate": 2.962962962962963e-06,
128
+ "loss": 1.6093,
129
+ "step": 16
130
+ },
131
+ {
132
+ "epoch": 0.06227819118488838,
133
+ "grad_norm": 1.1328125,
134
+ "learning_rate": 3.1481481481481483e-06,
135
+ "loss": 1.5846,
136
+ "step": 17
137
+ },
138
+ {
139
+ "epoch": 0.06594161419576416,
140
+ "grad_norm": 1.03125,
141
+ "learning_rate": 3.3333333333333333e-06,
142
+ "loss": 1.6201,
143
+ "step": 18
144
+ },
145
+ {
146
+ "epoch": 0.06960503720663995,
147
+ "grad_norm": 1.15625,
148
+ "learning_rate": 3.5185185185185187e-06,
149
+ "loss": 1.6206,
150
+ "step": 19
151
+ },
152
+ {
153
+ "epoch": 0.07326846021751574,
154
+ "grad_norm": 1.09375,
155
+ "learning_rate": 3.7037037037037037e-06,
156
+ "loss": 1.6238,
157
+ "step": 20
158
+ },
159
+ {
160
+ "epoch": 0.07693188322839153,
161
+ "grad_norm": 1.21875,
162
+ "learning_rate": 3.88888888888889e-06,
163
+ "loss": 1.6347,
164
+ "step": 21
165
+ },
166
+ {
167
+ "epoch": 0.08059530623926732,
168
+ "grad_norm": 1.0390625,
169
+ "learning_rate": 4.074074074074074e-06,
170
+ "loss": 1.6516,
171
+ "step": 22
172
+ },
173
+ {
174
+ "epoch": 0.0842587292501431,
175
+ "grad_norm": 1.078125,
176
+ "learning_rate": 4.2592592592592596e-06,
177
+ "loss": 1.6333,
178
+ "step": 23
179
+ },
180
+ {
181
+ "epoch": 0.08792215226101889,
182
+ "grad_norm": 0.98046875,
183
+ "learning_rate": 4.444444444444444e-06,
184
+ "loss": 1.6349,
185
+ "step": 24
186
+ },
187
+ {
188
+ "epoch": 0.09158557527189468,
189
+ "grad_norm": 0.9609375,
190
+ "learning_rate": 4.62962962962963e-06,
191
+ "loss": 1.6026,
192
+ "step": 25
193
+ },
194
+ {
195
+ "epoch": 0.09524899828277046,
196
+ "grad_norm": 0.93359375,
197
+ "learning_rate": 4.814814814814815e-06,
198
+ "loss": 1.5571,
199
+ "step": 26
200
+ },
201
+ {
202
+ "epoch": 0.09891242129364625,
203
+ "grad_norm": 0.9609375,
204
+ "learning_rate": 5e-06,
205
+ "loss": 1.582,
206
+ "step": 27
207
+ },
208
+ {
209
+ "epoch": 0.10257584430452203,
210
+ "grad_norm": 0.97265625,
211
+ "learning_rate": 5.185185185185185e-06,
212
+ "loss": 1.6034,
213
+ "step": 28
214
+ },
215
+ {
216
+ "epoch": 0.10623926731539783,
217
+ "grad_norm": 0.93359375,
218
+ "learning_rate": 5.370370370370371e-06,
219
+ "loss": 1.5796,
220
+ "step": 29
221
+ },
222
+ {
223
+ "epoch": 0.10990269032627362,
224
+ "grad_norm": 0.9140625,
225
+ "learning_rate": 5.555555555555557e-06,
226
+ "loss": 1.5657,
227
+ "step": 30
228
+ },
229
+ {
230
+ "epoch": 0.1135661133371494,
231
+ "grad_norm": 0.91015625,
232
+ "learning_rate": 5.740740740740741e-06,
233
+ "loss": 1.6262,
234
+ "step": 31
235
+ },
236
+ {
237
+ "epoch": 0.11722953634802519,
238
+ "grad_norm": 0.95703125,
239
+ "learning_rate": 5.925925925925926e-06,
240
+ "loss": 1.5697,
241
+ "step": 32
242
+ },
243
+ {
244
+ "epoch": 0.12089295935890097,
245
+ "grad_norm": 0.85546875,
246
+ "learning_rate": 6.111111111111112e-06,
247
+ "loss": 1.5783,
248
+ "step": 33
249
+ },
250
+ {
251
+ "epoch": 0.12455638236977676,
252
+ "grad_norm": 0.83203125,
253
+ "learning_rate": 6.296296296296297e-06,
254
+ "loss": 1.5434,
255
+ "step": 34
256
+ },
257
+ {
258
+ "epoch": 0.12821980538065256,
259
+ "grad_norm": 0.83203125,
260
+ "learning_rate": 6.481481481481482e-06,
261
+ "loss": 1.4929,
262
+ "step": 35
263
+ },
264
+ {
265
+ "epoch": 0.13188322839152833,
266
+ "grad_norm": 0.83984375,
267
+ "learning_rate": 6.666666666666667e-06,
268
+ "loss": 1.5474,
269
+ "step": 36
270
+ },
271
+ {
272
+ "epoch": 0.13554665140240413,
273
+ "grad_norm": 0.79296875,
274
+ "learning_rate": 6.851851851851853e-06,
275
+ "loss": 1.5836,
276
+ "step": 37
277
+ },
278
+ {
279
+ "epoch": 0.1392100744132799,
280
+ "grad_norm": 0.91015625,
281
+ "learning_rate": 7.0370370370370375e-06,
282
+ "loss": 1.5739,
283
+ "step": 38
284
+ },
285
+ {
286
+ "epoch": 0.1428734974241557,
287
+ "grad_norm": 0.78515625,
288
+ "learning_rate": 7.222222222222223e-06,
289
+ "loss": 1.5365,
290
+ "step": 39
291
+ },
292
+ {
293
+ "epoch": 0.14653692043503147,
294
+ "grad_norm": 0.82421875,
295
+ "learning_rate": 7.4074074074074075e-06,
296
+ "loss": 1.5932,
297
+ "step": 40
298
+ },
299
+ {
300
+ "epoch": 0.15020034344590727,
301
+ "grad_norm": 0.80078125,
302
+ "learning_rate": 7.592592592592594e-06,
303
+ "loss": 1.5309,
304
+ "step": 41
305
+ },
306
+ {
307
+ "epoch": 0.15386376645678307,
308
+ "grad_norm": 0.8203125,
309
+ "learning_rate": 7.77777777777778e-06,
310
+ "loss": 1.5373,
311
+ "step": 42
312
+ },
313
+ {
314
+ "epoch": 0.15752718946765884,
315
+ "grad_norm": 0.78125,
316
+ "learning_rate": 7.962962962962963e-06,
317
+ "loss": 1.5511,
318
+ "step": 43
319
+ },
320
+ {
321
+ "epoch": 0.16119061247853464,
322
+ "grad_norm": 0.83203125,
323
+ "learning_rate": 8.148148148148148e-06,
324
+ "loss": 1.5749,
325
+ "step": 44
326
+ },
327
+ {
328
+ "epoch": 0.1648540354894104,
329
+ "grad_norm": 1.375,
330
+ "learning_rate": 8.333333333333334e-06,
331
+ "loss": 1.5303,
332
+ "step": 45
333
+ },
334
+ {
335
+ "epoch": 0.1685174585002862,
336
+ "grad_norm": 0.8203125,
337
+ "learning_rate": 8.518518518518519e-06,
338
+ "loss": 1.6222,
339
+ "step": 46
340
+ },
341
+ {
342
+ "epoch": 0.17218088151116198,
343
+ "grad_norm": 0.87109375,
344
+ "learning_rate": 8.703703703703705e-06,
345
+ "loss": 1.5169,
346
+ "step": 47
347
+ },
348
+ {
349
+ "epoch": 0.17584430452203778,
350
+ "grad_norm": 0.8671875,
351
+ "learning_rate": 8.888888888888888e-06,
352
+ "loss": 1.5518,
353
+ "step": 48
354
+ },
355
+ {
356
+ "epoch": 0.17950772753291355,
357
+ "grad_norm": 0.8359375,
358
+ "learning_rate": 9.074074074074075e-06,
359
+ "loss": 1.5487,
360
+ "step": 49
361
+ },
362
+ {
363
+ "epoch": 0.18317115054378935,
364
+ "grad_norm": 0.8515625,
365
+ "learning_rate": 9.25925925925926e-06,
366
+ "loss": 1.5546,
367
+ "step": 50
368
+ },
369
+ {
370
+ "epoch": 0.18683457355466515,
371
+ "grad_norm": 0.78125,
372
+ "learning_rate": 9.444444444444445e-06,
373
+ "loss": 1.5225,
374
+ "step": 51
375
+ },
376
+ {
377
+ "epoch": 0.19049799656554092,
378
+ "grad_norm": 0.73046875,
379
+ "learning_rate": 9.62962962962963e-06,
380
+ "loss": 1.5685,
381
+ "step": 52
382
+ },
383
+ {
384
+ "epoch": 0.19416141957641672,
385
+ "grad_norm": 0.73828125,
386
+ "learning_rate": 9.814814814814815e-06,
387
+ "loss": 1.5496,
388
+ "step": 53
389
+ },
390
+ {
391
+ "epoch": 0.1978248425872925,
392
+ "grad_norm": 0.83984375,
393
+ "learning_rate": 1e-05,
394
+ "loss": 1.4815,
395
+ "step": 54
396
+ },
397
+ {
398
+ "epoch": 0.2014882655981683,
399
+ "grad_norm": 0.71484375,
400
+ "learning_rate": 9.999898068580346e-06,
401
+ "loss": 1.5171,
402
+ "step": 55
403
+ },
404
+ {
405
+ "epoch": 0.20515168860904406,
406
+ "grad_norm": 0.7734375,
407
+ "learning_rate": 9.999592278477389e-06,
408
+ "loss": 1.5559,
409
+ "step": 56
410
+ },
411
+ {
412
+ "epoch": 0.20881511161991986,
413
+ "grad_norm": 0.76171875,
414
+ "learning_rate": 9.999082642158972e-06,
415
+ "loss": 1.5364,
416
+ "step": 57
417
+ },
418
+ {
419
+ "epoch": 0.21247853463079566,
420
+ "grad_norm": 0.76171875,
421
+ "learning_rate": 9.998369180404283e-06,
422
+ "loss": 1.5392,
423
+ "step": 58
424
+ },
425
+ {
426
+ "epoch": 0.21614195764167143,
427
+ "grad_norm": 0.7109375,
428
+ "learning_rate": 9.997451922302987e-06,
429
+ "loss": 1.5376,
430
+ "step": 59
431
+ },
432
+ {
433
+ "epoch": 0.21980538065254723,
434
+ "grad_norm": 0.765625,
435
+ "learning_rate": 9.99633090525405e-06,
436
+ "loss": 1.5128,
437
+ "step": 60
438
+ },
439
+ {
440
+ "epoch": 0.223468803663423,
441
+ "grad_norm": 0.78515625,
442
+ "learning_rate": 9.99500617496422e-06,
443
+ "loss": 1.4922,
444
+ "step": 61
445
+ },
446
+ {
447
+ "epoch": 0.2271322266742988,
448
+ "grad_norm": 0.75,
449
+ "learning_rate": 9.993477785446151e-06,
450
+ "loss": 1.5314,
451
+ "step": 62
452
+ },
453
+ {
454
+ "epoch": 0.23079564968517458,
455
+ "grad_norm": 0.78515625,
456
+ "learning_rate": 9.991745799016206e-06,
457
+ "loss": 1.5243,
458
+ "step": 63
459
+ },
460
+ {
461
+ "epoch": 0.23445907269605037,
462
+ "grad_norm": 0.73828125,
463
+ "learning_rate": 9.989810286291923e-06,
464
+ "loss": 1.5216,
465
+ "step": 64
466
+ },
467
+ {
468
+ "epoch": 0.23812249570692615,
469
+ "grad_norm": 0.76953125,
470
+ "learning_rate": 9.987671326189123e-06,
471
+ "loss": 1.4933,
472
+ "step": 65
473
+ },
474
+ {
475
+ "epoch": 0.24178591871780195,
476
+ "grad_norm": 0.73828125,
477
+ "learning_rate": 9.985329005918702e-06,
478
+ "loss": 1.5784,
479
+ "step": 66
480
+ },
481
+ {
482
+ "epoch": 0.24544934172867774,
483
+ "grad_norm": 0.73046875,
484
+ "learning_rate": 9.982783420983075e-06,
485
+ "loss": 1.5277,
486
+ "step": 67
487
+ },
488
+ {
489
+ "epoch": 0.24911276473955352,
490
+ "grad_norm": 0.765625,
491
+ "learning_rate": 9.980034675172274e-06,
492
+ "loss": 1.5479,
493
+ "step": 68
494
+ },
495
+ {
496
+ "epoch": 0.2527761877504293,
497
+ "grad_norm": 0.75390625,
498
+ "learning_rate": 9.977082880559725e-06,
499
+ "loss": 1.562,
500
+ "step": 69
501
+ },
502
+ {
503
+ "epoch": 0.2527761877504293,
504
+ "eval_loss": 1.4610767364501953,
505
+ "eval_runtime": 86.5737,
506
+ "eval_samples_per_second": 8.952,
507
+ "eval_steps_per_second": 4.482,
508
+ "step": 69
509
+ },
510
+ {
511
+ "epoch": 0.2564396107613051,
512
+ "grad_norm": 0.7421875,
513
+ "learning_rate": 9.973928157497675e-06,
514
+ "loss": 1.5046,
515
+ "step": 70
516
+ },
517
+ {
518
+ "epoch": 0.26010303377218086,
519
+ "grad_norm": 0.828125,
520
+ "learning_rate": 9.970570634612282e-06,
521
+ "loss": 1.5485,
522
+ "step": 71
523
+ },
524
+ {
525
+ "epoch": 0.26376645678305666,
526
+ "grad_norm": 0.73046875,
527
+ "learning_rate": 9.967010448798376e-06,
528
+ "loss": 1.5255,
529
+ "step": 72
530
+ },
531
+ {
532
+ "epoch": 0.26742987979393246,
533
+ "grad_norm": 0.73828125,
534
+ "learning_rate": 9.963247745213876e-06,
535
+ "loss": 1.5434,
536
+ "step": 73
537
+ },
538
+ {
539
+ "epoch": 0.27109330280480826,
540
+ "grad_norm": 0.85546875,
541
+ "learning_rate": 9.959282677273869e-06,
542
+ "loss": 1.5317,
543
+ "step": 74
544
+ },
545
+ {
546
+ "epoch": 0.27475672581568406,
547
+ "grad_norm": 0.74609375,
548
+ "learning_rate": 9.955115406644357e-06,
549
+ "loss": 1.5696,
550
+ "step": 75
551
+ },
552
+ {
553
+ "epoch": 0.2784201488265598,
554
+ "grad_norm": 0.91015625,
555
+ "learning_rate": 9.950746103235663e-06,
556
+ "loss": 1.59,
557
+ "step": 76
558
+ },
559
+ {
560
+ "epoch": 0.2820835718374356,
561
+ "grad_norm": 1.0703125,
562
+ "learning_rate": 9.946174945195508e-06,
563
+ "loss": 1.4958,
564
+ "step": 77
565
+ },
566
+ {
567
+ "epoch": 0.2857469948483114,
568
+ "grad_norm": 0.76171875,
569
+ "learning_rate": 9.941402118901743e-06,
570
+ "loss": 1.5623,
571
+ "step": 78
572
+ },
573
+ {
574
+ "epoch": 0.2894104178591872,
575
+ "grad_norm": 0.78125,
576
+ "learning_rate": 9.936427818954753e-06,
577
+ "loss": 1.5951,
578
+ "step": 79
579
+ },
580
+ {
581
+ "epoch": 0.29307384087006294,
582
+ "grad_norm": 0.734375,
583
+ "learning_rate": 9.931252248169518e-06,
584
+ "loss": 1.5174,
585
+ "step": 80
586
+ },
587
+ {
588
+ "epoch": 0.29673726388093874,
589
+ "grad_norm": 0.74609375,
590
+ "learning_rate": 9.92587561756735e-06,
591
+ "loss": 1.5268,
592
+ "step": 81
593
+ },
594
+ {
595
+ "epoch": 0.30040068689181454,
596
+ "grad_norm": 0.78125,
597
+ "learning_rate": 9.920298146367287e-06,
598
+ "loss": 1.5133,
599
+ "step": 82
600
+ },
601
+ {
602
+ "epoch": 0.30406410990269034,
603
+ "grad_norm": 0.73046875,
604
+ "learning_rate": 9.91452006197715e-06,
605
+ "loss": 1.4929,
606
+ "step": 83
607
+ },
608
+ {
609
+ "epoch": 0.30772753291356614,
610
+ "grad_norm": 0.77734375,
611
+ "learning_rate": 9.908541599984276e-06,
612
+ "loss": 1.4949,
613
+ "step": 84
614
+ },
615
+ {
616
+ "epoch": 0.3113909559244419,
617
+ "grad_norm": 0.73046875,
618
+ "learning_rate": 9.902363004145914e-06,
619
+ "loss": 1.5007,
620
+ "step": 85
621
+ },
622
+ {
623
+ "epoch": 0.3150543789353177,
624
+ "grad_norm": 0.7734375,
625
+ "learning_rate": 9.895984526379282e-06,
626
+ "loss": 1.5442,
627
+ "step": 86
628
+ },
629
+ {
630
+ "epoch": 0.3187178019461935,
631
+ "grad_norm": 0.83203125,
632
+ "learning_rate": 9.889406426751296e-06,
633
+ "loss": 1.4796,
634
+ "step": 87
635
+ },
636
+ {
637
+ "epoch": 0.3223812249570693,
638
+ "grad_norm": 0.8125,
639
+ "learning_rate": 9.882628973467972e-06,
640
+ "loss": 1.5317,
641
+ "step": 88
642
+ },
643
+ {
644
+ "epoch": 0.326044647967945,
645
+ "grad_norm": 0.7421875,
646
+ "learning_rate": 9.875652442863483e-06,
647
+ "loss": 1.511,
648
+ "step": 89
649
+ },
650
+ {
651
+ "epoch": 0.3297080709788208,
652
+ "grad_norm": 0.8046875,
653
+ "learning_rate": 9.868477119388897e-06,
654
+ "loss": 1.5016,
655
+ "step": 90
656
+ },
657
+ {
658
+ "epoch": 0.3333714939896966,
659
+ "grad_norm": 0.76953125,
660
+ "learning_rate": 9.861103295600574e-06,
661
+ "loss": 1.5879,
662
+ "step": 91
663
+ },
664
+ {
665
+ "epoch": 0.3370349170005724,
666
+ "grad_norm": 0.859375,
667
+ "learning_rate": 9.853531272148248e-06,
668
+ "loss": 1.5265,
669
+ "step": 92
670
+ },
671
+ {
672
+ "epoch": 0.3406983400114482,
673
+ "grad_norm": 0.75,
674
+ "learning_rate": 9.84576135776276e-06,
675
+ "loss": 1.5244,
676
+ "step": 93
677
+ },
678
+ {
679
+ "epoch": 0.34436176302232396,
680
+ "grad_norm": 0.74609375,
681
+ "learning_rate": 9.837793869243468e-06,
682
+ "loss": 1.5531,
683
+ "step": 94
684
+ },
685
+ {
686
+ "epoch": 0.34802518603319976,
687
+ "grad_norm": 0.75390625,
688
+ "learning_rate": 9.829629131445342e-06,
689
+ "loss": 1.5195,
690
+ "step": 95
691
+ },
692
+ {
693
+ "epoch": 0.35168860904407556,
694
+ "grad_norm": 0.7109375,
695
+ "learning_rate": 9.821267477265705e-06,
696
+ "loss": 1.54,
697
+ "step": 96
698
+ },
699
+ {
700
+ "epoch": 0.35535203205495136,
701
+ "grad_norm": 0.67578125,
702
+ "learning_rate": 9.812709247630671e-06,
703
+ "loss": 1.4454,
704
+ "step": 97
705
+ },
706
+ {
707
+ "epoch": 0.3590154550658271,
708
+ "grad_norm": 0.765625,
709
+ "learning_rate": 9.803954791481239e-06,
710
+ "loss": 1.4574,
711
+ "step": 98
712
+ },
713
+ {
714
+ "epoch": 0.3626788780767029,
715
+ "grad_norm": 0.73828125,
716
+ "learning_rate": 9.795004465759067e-06,
717
+ "loss": 1.5201,
718
+ "step": 99
719
+ },
720
+ {
721
+ "epoch": 0.3663423010875787,
722
+ "grad_norm": 0.73828125,
723
+ "learning_rate": 9.785858635391913e-06,
724
+ "loss": 1.4939,
725
+ "step": 100
726
+ },
727
+ {
728
+ "epoch": 0.3700057240984545,
729
+ "grad_norm": 0.6953125,
730
+ "learning_rate": 9.776517673278772e-06,
731
+ "loss": 1.5067,
732
+ "step": 101
733
+ },
734
+ {
735
+ "epoch": 0.3736691471093303,
736
+ "grad_norm": 0.75390625,
737
+ "learning_rate": 9.766981960274653e-06,
738
+ "loss": 1.4932,
739
+ "step": 102
740
+ },
741
+ {
742
+ "epoch": 0.37733257012020605,
743
+ "grad_norm": 0.78125,
744
+ "learning_rate": 9.757251885175063e-06,
745
+ "loss": 1.4845,
746
+ "step": 103
747
+ },
748
+ {
749
+ "epoch": 0.38099599313108184,
750
+ "grad_norm": 0.7109375,
751
+ "learning_rate": 9.747327844700147e-06,
752
+ "loss": 1.5307,
753
+ "step": 104
754
+ },
755
+ {
756
+ "epoch": 0.38465941614195764,
757
+ "grad_norm": 0.75390625,
758
+ "learning_rate": 9.737210243478522e-06,
759
+ "loss": 1.4548,
760
+ "step": 105
761
+ },
762
+ {
763
+ "epoch": 0.38832283915283344,
764
+ "grad_norm": 0.72265625,
765
+ "learning_rate": 9.726899494030768e-06,
766
+ "loss": 1.4711,
767
+ "step": 106
768
+ },
769
+ {
770
+ "epoch": 0.39198626216370924,
771
+ "grad_norm": 0.70703125,
772
+ "learning_rate": 9.716396016752616e-06,
773
+ "loss": 1.4636,
774
+ "step": 107
775
+ },
776
+ {
777
+ "epoch": 0.395649685174585,
778
+ "grad_norm": 0.7421875,
779
+ "learning_rate": 9.705700239897809e-06,
780
+ "loss": 1.499,
781
+ "step": 108
782
+ },
783
+ {
784
+ "epoch": 0.3993131081854608,
785
+ "grad_norm": 0.77734375,
786
+ "learning_rate": 9.694812599560632e-06,
787
+ "loss": 1.5242,
788
+ "step": 109
789
+ },
790
+ {
791
+ "epoch": 0.4029765311963366,
792
+ "grad_norm": 0.75390625,
793
+ "learning_rate": 9.68373353965814e-06,
794
+ "loss": 1.5273,
795
+ "step": 110
796
+ },
797
+ {
798
+ "epoch": 0.4066399542072124,
799
+ "grad_norm": 0.71875,
800
+ "learning_rate": 9.672463511912056e-06,
801
+ "loss": 1.5333,
802
+ "step": 111
803
+ },
804
+ {
805
+ "epoch": 0.41030337721808813,
806
+ "grad_norm": 0.703125,
807
+ "learning_rate": 9.66100297583035e-06,
808
+ "loss": 1.4781,
809
+ "step": 112
810
+ },
811
+ {
812
+ "epoch": 0.4139668002289639,
813
+ "grad_norm": 0.69921875,
814
+ "learning_rate": 9.649352398688506e-06,
815
+ "loss": 1.5159,
816
+ "step": 113
817
+ },
818
+ {
819
+ "epoch": 0.4176302232398397,
820
+ "grad_norm": 0.703125,
821
+ "learning_rate": 9.637512255510475e-06,
822
+ "loss": 1.5112,
823
+ "step": 114
824
+ },
825
+ {
826
+ "epoch": 0.4212936462507155,
827
+ "grad_norm": 0.79296875,
828
+ "learning_rate": 9.625483029049295e-06,
829
+ "loss": 1.5399,
830
+ "step": 115
831
+ },
832
+ {
833
+ "epoch": 0.4249570692615913,
834
+ "grad_norm": 0.73828125,
835
+ "learning_rate": 9.613265209767417e-06,
836
+ "loss": 1.521,
837
+ "step": 116
838
+ },
839
+ {
840
+ "epoch": 0.42862049227246707,
841
+ "grad_norm": 0.7265625,
842
+ "learning_rate": 9.600859295816708e-06,
843
+ "loss": 1.4956,
844
+ "step": 117
845
+ },
846
+ {
847
+ "epoch": 0.43228391528334287,
848
+ "grad_norm": 0.7109375,
849
+ "learning_rate": 9.588265793018141e-06,
850
+ "loss": 1.4859,
851
+ "step": 118
852
+ },
853
+ {
854
+ "epoch": 0.43594733829421867,
855
+ "grad_norm": 0.68359375,
856
+ "learning_rate": 9.575485214841158e-06,
857
+ "loss": 1.4867,
858
+ "step": 119
859
+ },
860
+ {
861
+ "epoch": 0.43961076130509447,
862
+ "grad_norm": 0.85546875,
863
+ "learning_rate": 9.562518082382751e-06,
864
+ "loss": 1.488,
865
+ "step": 120
866
+ },
867
+ {
868
+ "epoch": 0.4432741843159702,
869
+ "grad_norm": 0.79296875,
870
+ "learning_rate": 9.54936492434621e-06,
871
+ "loss": 1.5129,
872
+ "step": 121
873
+ },
874
+ {
875
+ "epoch": 0.446937607326846,
876
+ "grad_norm": 0.7578125,
877
+ "learning_rate": 9.536026277019562e-06,
878
+ "loss": 1.5484,
879
+ "step": 122
880
+ },
881
+ {
882
+ "epoch": 0.4506010303377218,
883
+ "grad_norm": 0.71875,
884
+ "learning_rate": 9.522502684253709e-06,
885
+ "loss": 1.5017,
886
+ "step": 123
887
+ },
888
+ {
889
+ "epoch": 0.4542644533485976,
890
+ "grad_norm": 0.73828125,
891
+ "learning_rate": 9.508794697440257e-06,
892
+ "loss": 1.4869,
893
+ "step": 124
894
+ },
895
+ {
896
+ "epoch": 0.4579278763594734,
897
+ "grad_norm": 0.7578125,
898
+ "learning_rate": 9.494902875489031e-06,
899
+ "loss": 1.5279,
900
+ "step": 125
901
+ },
902
+ {
903
+ "epoch": 0.46159129937034915,
904
+ "grad_norm": 0.7109375,
905
+ "learning_rate": 9.480827784805278e-06,
906
+ "loss": 1.506,
907
+ "step": 126
908
+ },
909
+ {
910
+ "epoch": 0.46525472238122495,
911
+ "grad_norm": 0.7578125,
912
+ "learning_rate": 9.466569999266595e-06,
913
+ "loss": 1.4857,
914
+ "step": 127
915
+ },
916
+ {
917
+ "epoch": 0.46891814539210075,
918
+ "grad_norm": 0.69140625,
919
+ "learning_rate": 9.452130100199504e-06,
920
+ "loss": 1.5652,
921
+ "step": 128
922
+ },
923
+ {
924
+ "epoch": 0.47258156840297655,
925
+ "grad_norm": 0.75,
926
+ "learning_rate": 9.437508676355774e-06,
927
+ "loss": 1.5093,
928
+ "step": 129
929
+ },
930
+ {
931
+ "epoch": 0.4762449914138523,
932
+ "grad_norm": 0.75,
933
+ "learning_rate": 9.422706323888398e-06,
934
+ "loss": 1.4592,
935
+ "step": 130
936
+ },
937
+ {
938
+ "epoch": 0.4799084144247281,
939
+ "grad_norm": 0.734375,
940
+ "learning_rate": 9.407723646327299e-06,
941
+ "loss": 1.4626,
942
+ "step": 131
943
+ },
944
+ {
945
+ "epoch": 0.4835718374356039,
946
+ "grad_norm": 0.68359375,
947
+ "learning_rate": 9.392561254554712e-06,
948
+ "loss": 1.5082,
949
+ "step": 132
950
+ },
951
+ {
952
+ "epoch": 0.4872352604464797,
953
+ "grad_norm": 0.703125,
954
+ "learning_rate": 9.377219766780288e-06,
955
+ "loss": 1.5389,
956
+ "step": 133
957
+ },
958
+ {
959
+ "epoch": 0.4908986834573555,
960
+ "grad_norm": 0.66015625,
961
+ "learning_rate": 9.361699808515877e-06,
962
+ "loss": 1.4567,
963
+ "step": 134
964
+ },
965
+ {
966
+ "epoch": 0.49456210646823123,
967
+ "grad_norm": 0.6796875,
968
+ "learning_rate": 9.346002012550027e-06,
969
+ "loss": 1.4903,
970
+ "step": 135
971
+ },
972
+ {
973
+ "epoch": 0.49822552947910703,
974
+ "grad_norm": 0.69921875,
975
+ "learning_rate": 9.330127018922195e-06,
976
+ "loss": 1.5068,
977
+ "step": 136
978
+ },
979
+ {
980
+ "epoch": 0.5018889524899828,
981
+ "grad_norm": 0.6953125,
982
+ "learning_rate": 9.314075474896631e-06,
983
+ "loss": 1.5039,
984
+ "step": 137
985
+ },
986
+ {
987
+ "epoch": 0.5055523755008586,
988
+ "grad_norm": 0.7265625,
989
+ "learning_rate": 9.297848034936007e-06,
990
+ "loss": 1.4928,
991
+ "step": 138
992
+ },
993
+ {
994
+ "epoch": 0.5055523755008586,
995
+ "eval_loss": 1.4303784370422363,
996
+ "eval_runtime": 86.5297,
997
+ "eval_samples_per_second": 8.956,
998
+ "eval_steps_per_second": 4.484,
999
+ "step": 138
1000
+ },
1001
+ {
1002
+ "epoch": 0.5092157985117344,
1003
+ "grad_norm": 0.734375,
1004
+ "learning_rate": 9.281445360674717e-06,
1005
+ "loss": 1.5562,
1006
+ "step": 139
1007
+ },
1008
+ {
1009
+ "epoch": 0.5128792215226102,
1010
+ "grad_norm": 0.6875,
1011
+ "learning_rate": 9.264868120891913e-06,
1012
+ "loss": 1.4492,
1013
+ "step": 140
1014
+ },
1015
+ {
1016
+ "epoch": 0.516542644533486,
1017
+ "grad_norm": 0.69140625,
1018
+ "learning_rate": 9.24811699148423e-06,
1019
+ "loss": 1.4349,
1020
+ "step": 141
1021
+ },
1022
+ {
1023
+ "epoch": 0.5202060675443617,
1024
+ "grad_norm": 0.67578125,
1025
+ "learning_rate": 9.231192655438222e-06,
1026
+ "loss": 1.479,
1027
+ "step": 142
1028
+ },
1029
+ {
1030
+ "epoch": 0.5238694905552376,
1031
+ "grad_norm": 0.765625,
1032
+ "learning_rate": 9.214095802802533e-06,
1033
+ "loss": 1.5113,
1034
+ "step": 143
1035
+ },
1036
+ {
1037
+ "epoch": 0.5275329135661133,
1038
+ "grad_norm": 0.75,
1039
+ "learning_rate": 9.196827130659752e-06,
1040
+ "loss": 1.5157,
1041
+ "step": 144
1042
+ },
1043
+ {
1044
+ "epoch": 0.5311963365769892,
1045
+ "grad_norm": 0.69921875,
1046
+ "learning_rate": 9.179387343097978e-06,
1047
+ "loss": 1.4772,
1048
+ "step": 145
1049
+ },
1050
+ {
1051
+ "epoch": 0.5348597595878649,
1052
+ "grad_norm": 0.74609375,
1053
+ "learning_rate": 9.161777151182137e-06,
1054
+ "loss": 1.5372,
1055
+ "step": 146
1056
+ },
1057
+ {
1058
+ "epoch": 0.5385231825987407,
1059
+ "grad_norm": 0.6796875,
1060
+ "learning_rate": 9.143997272924974e-06,
1061
+ "loss": 1.4623,
1062
+ "step": 147
1063
+ },
1064
+ {
1065
+ "epoch": 0.5421866056096165,
1066
+ "grad_norm": 0.7109375,
1067
+ "learning_rate": 9.12604843325778e-06,
1068
+ "loss": 1.5204,
1069
+ "step": 148
1070
+ },
1071
+ {
1072
+ "epoch": 0.5458500286204923,
1073
+ "grad_norm": 0.72265625,
1074
+ "learning_rate": 9.10793136400084e-06,
1075
+ "loss": 1.5264,
1076
+ "step": 149
1077
+ },
1078
+ {
1079
+ "epoch": 0.5495134516313681,
1080
+ "grad_norm": 0.796875,
1081
+ "learning_rate": 9.089646803833589e-06,
1082
+ "loss": 1.467,
1083
+ "step": 150
1084
+ },
1085
+ {
1086
+ "epoch": 0.5531768746422439,
1087
+ "grad_norm": 0.6875,
1088
+ "learning_rate": 9.071195498264497e-06,
1089
+ "loss": 1.4847,
1090
+ "step": 151
1091
+ },
1092
+ {
1093
+ "epoch": 0.5568402976531196,
1094
+ "grad_norm": 0.71875,
1095
+ "learning_rate": 9.052578199600675e-06,
1096
+ "loss": 1.4867,
1097
+ "step": 152
1098
+ },
1099
+ {
1100
+ "epoch": 0.5605037206639955,
1101
+ "grad_norm": 0.73828125,
1102
+ "learning_rate": 9.033795666917191e-06,
1103
+ "loss": 1.49,
1104
+ "step": 153
1105
+ },
1106
+ {
1107
+ "epoch": 0.5641671436748712,
1108
+ "grad_norm": 0.70703125,
1109
+ "learning_rate": 9.014848666026138e-06,
1110
+ "loss": 1.4984,
1111
+ "step": 154
1112
+ },
1113
+ {
1114
+ "epoch": 0.5678305666857469,
1115
+ "grad_norm": 0.82421875,
1116
+ "learning_rate": 8.995737969445395e-06,
1117
+ "loss": 1.4957,
1118
+ "step": 155
1119
+ },
1120
+ {
1121
+ "epoch": 0.5714939896966228,
1122
+ "grad_norm": 0.70703125,
1123
+ "learning_rate": 8.976464356367133e-06,
1124
+ "loss": 1.4975,
1125
+ "step": 156
1126
+ },
1127
+ {
1128
+ "epoch": 0.5751574127074985,
1129
+ "grad_norm": 0.7578125,
1130
+ "learning_rate": 8.957028612626051e-06,
1131
+ "loss": 1.5144,
1132
+ "step": 157
1133
+ },
1134
+ {
1135
+ "epoch": 0.5788208357183744,
1136
+ "grad_norm": 0.69140625,
1137
+ "learning_rate": 8.937431530667329e-06,
1138
+ "loss": 1.4688,
1139
+ "step": 158
1140
+ },
1141
+ {
1142
+ "epoch": 0.5824842587292501,
1143
+ "grad_norm": 0.796875,
1144
+ "learning_rate": 8.917673909514321e-06,
1145
+ "loss": 1.4647,
1146
+ "step": 159
1147
+ },
1148
+ {
1149
+ "epoch": 0.5861476817401259,
1150
+ "grad_norm": 0.69140625,
1151
+ "learning_rate": 8.897756554735976e-06,
1152
+ "loss": 1.5029,
1153
+ "step": 160
1154
+ },
1155
+ {
1156
+ "epoch": 0.5898111047510017,
1157
+ "grad_norm": 0.71875,
1158
+ "learning_rate": 8.877680278413995e-06,
1159
+ "loss": 1.4559,
1160
+ "step": 161
1161
+ },
1162
+ {
1163
+ "epoch": 0.5934745277618775,
1164
+ "grad_norm": 0.7265625,
1165
+ "learning_rate": 8.857445899109716e-06,
1166
+ "loss": 1.4876,
1167
+ "step": 162
1168
+ },
1169
+ {
1170
+ "epoch": 0.5971379507727533,
1171
+ "grad_norm": 0.71484375,
1172
+ "learning_rate": 8.83705424183074e-06,
1173
+ "loss": 1.5204,
1174
+ "step": 163
1175
+ },
1176
+ {
1177
+ "epoch": 0.6008013737836291,
1178
+ "grad_norm": 0.75390625,
1179
+ "learning_rate": 8.8165061379973e-06,
1180
+ "loss": 1.5099,
1181
+ "step": 164
1182
+ },
1183
+ {
1184
+ "epoch": 0.6044647967945048,
1185
+ "grad_norm": 0.8203125,
1186
+ "learning_rate": 8.795802425408352e-06,
1187
+ "loss": 1.5189,
1188
+ "step": 165
1189
+ },
1190
+ {
1191
+ "epoch": 0.6081282198053807,
1192
+ "grad_norm": 0.67578125,
1193
+ "learning_rate": 8.774943948207427e-06,
1194
+ "loss": 1.4384,
1195
+ "step": 166
1196
+ },
1197
+ {
1198
+ "epoch": 0.6117916428162564,
1199
+ "grad_norm": 0.7109375,
1200
+ "learning_rate": 8.753931556848195e-06,
1201
+ "loss": 1.4618,
1202
+ "step": 167
1203
+ },
1204
+ {
1205
+ "epoch": 0.6154550658271323,
1206
+ "grad_norm": 0.69140625,
1207
+ "learning_rate": 8.732766108059814e-06,
1208
+ "loss": 1.4531,
1209
+ "step": 168
1210
+ },
1211
+ {
1212
+ "epoch": 0.619118488838008,
1213
+ "grad_norm": 0.71875,
1214
+ "learning_rate": 8.711448464811978e-06,
1215
+ "loss": 1.4737,
1216
+ "step": 169
1217
+ },
1218
+ {
1219
+ "epoch": 0.6227819118488838,
1220
+ "grad_norm": 0.69921875,
1221
+ "learning_rate": 8.689979496279747e-06,
1222
+ "loss": 1.4604,
1223
+ "step": 170
1224
+ },
1225
+ {
1226
+ "epoch": 0.6264453348597596,
1227
+ "grad_norm": 0.7734375,
1228
+ "learning_rate": 8.668360077808093e-06,
1229
+ "loss": 1.488,
1230
+ "step": 171
1231
+ },
1232
+ {
1233
+ "epoch": 0.6301087578706354,
1234
+ "grad_norm": 0.8828125,
1235
+ "learning_rate": 8.646591090876225e-06,
1236
+ "loss": 1.4882,
1237
+ "step": 172
1238
+ },
1239
+ {
1240
+ "epoch": 0.6337721808815112,
1241
+ "grad_norm": 0.76953125,
1242
+ "learning_rate": 8.62467342306164e-06,
1243
+ "loss": 1.4495,
1244
+ "step": 173
1245
+ },
1246
+ {
1247
+ "epoch": 0.637435603892387,
1248
+ "grad_norm": 0.6953125,
1249
+ "learning_rate": 8.602607968003935e-06,
1250
+ "loss": 1.4671,
1251
+ "step": 174
1252
+ },
1253
+ {
1254
+ "epoch": 0.6410990269032627,
1255
+ "grad_norm": 0.71484375,
1256
+ "learning_rate": 8.580395625368377e-06,
1257
+ "loss": 1.4682,
1258
+ "step": 175
1259
+ },
1260
+ {
1261
+ "epoch": 0.6447624499141386,
1262
+ "grad_norm": 0.7734375,
1263
+ "learning_rate": 8.558037300809209e-06,
1264
+ "loss": 1.5226,
1265
+ "step": 176
1266
+ },
1267
+ {
1268
+ "epoch": 0.6484258729250143,
1269
+ "grad_norm": 0.73046875,
1270
+ "learning_rate": 8.535533905932739e-06,
1271
+ "loss": 1.4958,
1272
+ "step": 177
1273
+ },
1274
+ {
1275
+ "epoch": 0.65208929593589,
1276
+ "grad_norm": 0.85546875,
1277
+ "learning_rate": 8.512886358260162e-06,
1278
+ "loss": 1.4589,
1279
+ "step": 178
1280
+ },
1281
+ {
1282
+ "epoch": 0.6557527189467659,
1283
+ "grad_norm": 0.6796875,
1284
+ "learning_rate": 8.490095581190149e-06,
1285
+ "loss": 1.4966,
1286
+ "step": 179
1287
+ },
1288
+ {
1289
+ "epoch": 0.6594161419576416,
1290
+ "grad_norm": 0.73046875,
1291
+ "learning_rate": 8.467162503961209e-06,
1292
+ "loss": 1.4866,
1293
+ "step": 180
1294
+ },
1295
+ {
1296
+ "epoch": 0.6630795649685175,
1297
+ "grad_norm": 0.6953125,
1298
+ "learning_rate": 8.444088061613788e-06,
1299
+ "loss": 1.4029,
1300
+ "step": 181
1301
+ },
1302
+ {
1303
+ "epoch": 0.6667429879793932,
1304
+ "grad_norm": 0.7421875,
1305
+ "learning_rate": 8.420873194952153e-06,
1306
+ "loss": 1.4235,
1307
+ "step": 182
1308
+ },
1309
+ {
1310
+ "epoch": 0.670406410990269,
1311
+ "grad_norm": 0.72265625,
1312
+ "learning_rate": 8.39751885050603e-06,
1313
+ "loss": 1.5138,
1314
+ "step": 183
1315
+ },
1316
+ {
1317
+ "epoch": 0.6740698340011448,
1318
+ "grad_norm": 0.734375,
1319
+ "learning_rate": 8.37402598049201e-06,
1320
+ "loss": 1.4258,
1321
+ "step": 184
1322
+ },
1323
+ {
1324
+ "epoch": 0.6777332570120206,
1325
+ "grad_norm": 0.6875,
1326
+ "learning_rate": 8.350395542774737e-06,
1327
+ "loss": 1.5137,
1328
+ "step": 185
1329
+ },
1330
+ {
1331
+ "epoch": 0.6813966800228964,
1332
+ "grad_norm": 0.76171875,
1333
+ "learning_rate": 8.326628500827826e-06,
1334
+ "loss": 1.5015,
1335
+ "step": 186
1336
+ },
1337
+ {
1338
+ "epoch": 0.6850601030337722,
1339
+ "grad_norm": 0.74609375,
1340
+ "learning_rate": 8.302725823694619e-06,
1341
+ "loss": 1.4839,
1342
+ "step": 187
1343
+ },
1344
+ {
1345
+ "epoch": 0.6887235260446479,
1346
+ "grad_norm": 0.69921875,
1347
+ "learning_rate": 8.278688485948634e-06,
1348
+ "loss": 1.479,
1349
+ "step": 188
1350
+ },
1351
+ {
1352
+ "epoch": 0.6923869490555238,
1353
+ "grad_norm": 0.69921875,
1354
+ "learning_rate": 8.254517467653858e-06,
1355
+ "loss": 1.5126,
1356
+ "step": 189
1357
+ },
1358
+ {
1359
+ "epoch": 0.6960503720663995,
1360
+ "grad_norm": 0.67578125,
1361
+ "learning_rate": 8.230213754324773e-06,
1362
+ "loss": 1.4236,
1363
+ "step": 190
1364
+ },
1365
+ {
1366
+ "epoch": 0.6997137950772754,
1367
+ "grad_norm": 0.6953125,
1368
+ "learning_rate": 8.205778336886182e-06,
1369
+ "loss": 1.4576,
1370
+ "step": 191
1371
+ },
1372
+ {
1373
+ "epoch": 0.7033772180881511,
1374
+ "grad_norm": 0.68359375,
1375
+ "learning_rate": 8.1812122116328e-06,
1376
+ "loss": 1.4378,
1377
+ "step": 192
1378
+ },
1379
+ {
1380
+ "epoch": 0.7070406410990269,
1381
+ "grad_norm": 0.7421875,
1382
+ "learning_rate": 8.156516380188635e-06,
1383
+ "loss": 1.4678,
1384
+ "step": 193
1385
+ },
1386
+ {
1387
+ "epoch": 0.7107040641099027,
1388
+ "grad_norm": 0.734375,
1389
+ "learning_rate": 8.131691849466154e-06,
1390
+ "loss": 1.4711,
1391
+ "step": 194
1392
+ },
1393
+ {
1394
+ "epoch": 0.7143674871207785,
1395
+ "grad_norm": 0.65625,
1396
+ "learning_rate": 8.106739631625216e-06,
1397
+ "loss": 1.4751,
1398
+ "step": 195
1399
+ },
1400
+ {
1401
+ "epoch": 0.7180309101316542,
1402
+ "grad_norm": 0.8046875,
1403
+ "learning_rate": 8.081660744031818e-06,
1404
+ "loss": 1.4802,
1405
+ "step": 196
1406
+ },
1407
+ {
1408
+ "epoch": 0.7216943331425301,
1409
+ "grad_norm": 0.6953125,
1410
+ "learning_rate": 8.056456209216609e-06,
1411
+ "loss": 1.4942,
1412
+ "step": 197
1413
+ },
1414
+ {
1415
+ "epoch": 0.7253577561534058,
1416
+ "grad_norm": 0.6875,
1417
+ "learning_rate": 8.031127054833192e-06,
1418
+ "loss": 1.4993,
1419
+ "step": 198
1420
+ },
1421
+ {
1422
+ "epoch": 0.7290211791642817,
1423
+ "grad_norm": 0.71484375,
1424
+ "learning_rate": 8.005674313616231e-06,
1425
+ "loss": 1.4589,
1426
+ "step": 199
1427
+ },
1428
+ {
1429
+ "epoch": 0.7326846021751574,
1430
+ "grad_norm": 0.6953125,
1431
+ "learning_rate": 7.98009902333935e-06,
1432
+ "loss": 1.535,
1433
+ "step": 200
1434
+ },
1435
+ {
1436
+ "epoch": 0.7363480251860332,
1437
+ "grad_norm": 0.6953125,
1438
+ "learning_rate": 7.954402226772804e-06,
1439
+ "loss": 1.4564,
1440
+ "step": 201
1441
+ },
1442
+ {
1443
+ "epoch": 0.740011448196909,
1444
+ "grad_norm": 0.75390625,
1445
+ "learning_rate": 7.928584971640974e-06,
1446
+ "loss": 1.5054,
1447
+ "step": 202
1448
+ },
1449
+ {
1450
+ "epoch": 0.7436748712077847,
1451
+ "grad_norm": 0.74609375,
1452
+ "learning_rate": 7.90264831057965e-06,
1453
+ "loss": 1.4671,
1454
+ "step": 203
1455
+ },
1456
+ {
1457
+ "epoch": 0.7473382942186606,
1458
+ "grad_norm": 0.8046875,
1459
+ "learning_rate": 7.876593301093104e-06,
1460
+ "loss": 1.4747,
1461
+ "step": 204
1462
+ },
1463
+ {
1464
+ "epoch": 0.7510017172295363,
1465
+ "grad_norm": 0.79296875,
1466
+ "learning_rate": 7.850421005510977e-06,
1467
+ "loss": 1.4493,
1468
+ "step": 205
1469
+ },
1470
+ {
1471
+ "epoch": 0.7546651402404121,
1472
+ "grad_norm": 0.71875,
1473
+ "learning_rate": 7.824132490944968e-06,
1474
+ "loss": 1.4558,
1475
+ "step": 206
1476
+ },
1477
+ {
1478
+ "epoch": 0.758328563251288,
1479
+ "grad_norm": 0.7265625,
1480
+ "learning_rate": 7.797728829245321e-06,
1481
+ "loss": 1.4968,
1482
+ "step": 207
1483
+ },
1484
+ {
1485
+ "epoch": 0.758328563251288,
1486
+ "eval_loss": 1.4155006408691406,
1487
+ "eval_runtime": 86.4267,
1488
+ "eval_samples_per_second": 8.967,
1489
+ "eval_steps_per_second": 4.489,
1490
+ "step": 207
1491
+ },
1492
+ {
1493
+ "epoch": 0.7619919862621637,
1494
+ "grad_norm": 0.67578125,
1495
+ "learning_rate": 7.771211096957125e-06,
1496
+ "loss": 1.5118,
1497
+ "step": 208
1498
+ },
1499
+ {
1500
+ "epoch": 0.7656554092730395,
1501
+ "grad_norm": 0.7109375,
1502
+ "learning_rate": 7.744580375276416e-06,
1503
+ "loss": 1.4467,
1504
+ "step": 209
1505
+ },
1506
+ {
1507
+ "epoch": 0.7693188322839153,
1508
+ "grad_norm": 0.72265625,
1509
+ "learning_rate": 7.717837750006106e-06,
1510
+ "loss": 1.434,
1511
+ "step": 210
1512
+ },
1513
+ {
1514
+ "epoch": 0.772982255294791,
1515
+ "grad_norm": 0.68359375,
1516
+ "learning_rate": 7.690984311511695e-06,
1517
+ "loss": 1.4912,
1518
+ "step": 211
1519
+ },
1520
+ {
1521
+ "epoch": 0.7766456783056669,
1522
+ "grad_norm": 0.75390625,
1523
+ "learning_rate": 7.664021154676828e-06,
1524
+ "loss": 1.4985,
1525
+ "step": 212
1526
+ },
1527
+ {
1528
+ "epoch": 0.7803091013165426,
1529
+ "grad_norm": 0.6953125,
1530
+ "learning_rate": 7.636949378858647e-06,
1531
+ "loss": 1.4642,
1532
+ "step": 213
1533
+ },
1534
+ {
1535
+ "epoch": 0.7839725243274185,
1536
+ "grad_norm": 0.78125,
1537
+ "learning_rate": 7.609770087842969e-06,
1538
+ "loss": 1.5051,
1539
+ "step": 214
1540
+ },
1541
+ {
1542
+ "epoch": 0.7876359473382942,
1543
+ "grad_norm": 0.7265625,
1544
+ "learning_rate": 7.582484389799279e-06,
1545
+ "loss": 1.4656,
1546
+ "step": 215
1547
+ },
1548
+ {
1549
+ "epoch": 0.79129937034917,
1550
+ "grad_norm": 0.75,
1551
+ "learning_rate": 7.555093397235553e-06,
1552
+ "loss": 1.4589,
1553
+ "step": 216
1554
+ },
1555
+ {
1556
+ "epoch": 0.7949627933600458,
1557
+ "grad_norm": 0.74609375,
1558
+ "learning_rate": 7.527598226952895e-06,
1559
+ "loss": 1.499,
1560
+ "step": 217
1561
+ },
1562
+ {
1563
+ "epoch": 0.7986262163709216,
1564
+ "grad_norm": 0.71484375,
1565
+ "learning_rate": 7.500000000000001e-06,
1566
+ "loss": 1.4684,
1567
+ "step": 218
1568
+ },
1569
+ {
1570
+ "epoch": 0.8022896393817973,
1571
+ "grad_norm": 0.7109375,
1572
+ "learning_rate": 7.472299841627452e-06,
1573
+ "loss": 1.488,
1574
+ "step": 219
1575
+ },
1576
+ {
1577
+ "epoch": 0.8059530623926732,
1578
+ "grad_norm": 0.6953125,
1579
+ "learning_rate": 7.444498881241835e-06,
1580
+ "loss": 1.5102,
1581
+ "step": 220
1582
+ },
1583
+ {
1584
+ "epoch": 0.8096164854035489,
1585
+ "grad_norm": 0.6875,
1586
+ "learning_rate": 7.4165982523596945e-06,
1587
+ "loss": 1.4967,
1588
+ "step": 221
1589
+ },
1590
+ {
1591
+ "epoch": 0.8132799084144248,
1592
+ "grad_norm": 0.7109375,
1593
+ "learning_rate": 7.388599092561315e-06,
1594
+ "loss": 1.4965,
1595
+ "step": 222
1596
+ },
1597
+ {
1598
+ "epoch": 0.8169433314253005,
1599
+ "grad_norm": 0.7265625,
1600
+ "learning_rate": 7.360502543444339e-06,
1601
+ "loss": 1.4254,
1602
+ "step": 223
1603
+ },
1604
+ {
1605
+ "epoch": 0.8206067544361763,
1606
+ "grad_norm": 0.6953125,
1607
+ "learning_rate": 7.3323097505772225e-06,
1608
+ "loss": 1.4696,
1609
+ "step": 224
1610
+ },
1611
+ {
1612
+ "epoch": 0.8242701774470521,
1613
+ "grad_norm": 0.69921875,
1614
+ "learning_rate": 7.304021863452525e-06,
1615
+ "loss": 1.4695,
1616
+ "step": 225
1617
+ },
1618
+ {
1619
+ "epoch": 0.8279336004579279,
1620
+ "grad_norm": 0.69140625,
1621
+ "learning_rate": 7.2756400354400445e-06,
1622
+ "loss": 1.4825,
1623
+ "step": 226
1624
+ },
1625
+ {
1626
+ "epoch": 0.8315970234688037,
1627
+ "grad_norm": 0.69921875,
1628
+ "learning_rate": 7.24716542373979e-06,
1629
+ "loss": 1.4858,
1630
+ "step": 227
1631
+ },
1632
+ {
1633
+ "epoch": 0.8352604464796795,
1634
+ "grad_norm": 0.72265625,
1635
+ "learning_rate": 7.218599189334799e-06,
1636
+ "loss": 1.4415,
1637
+ "step": 228
1638
+ },
1639
+ {
1640
+ "epoch": 0.8389238694905552,
1641
+ "grad_norm": 0.765625,
1642
+ "learning_rate": 7.189942496943803e-06,
1643
+ "loss": 1.4819,
1644
+ "step": 229
1645
+ },
1646
+ {
1647
+ "epoch": 0.842587292501431,
1648
+ "grad_norm": 0.6953125,
1649
+ "learning_rate": 7.161196514973735e-06,
1650
+ "loss": 1.4335,
1651
+ "step": 230
1652
+ },
1653
+ {
1654
+ "epoch": 0.8462507155123068,
1655
+ "grad_norm": 0.6875,
1656
+ "learning_rate": 7.132362415472099e-06,
1657
+ "loss": 1.4494,
1658
+ "step": 231
1659
+ },
1660
+ {
1661
+ "epoch": 0.8499141385231826,
1662
+ "grad_norm": 0.7265625,
1663
+ "learning_rate": 7.1034413740791705e-06,
1664
+ "loss": 1.4306,
1665
+ "step": 232
1666
+ },
1667
+ {
1668
+ "epoch": 0.8535775615340584,
1669
+ "grad_norm": 0.69140625,
1670
+ "learning_rate": 7.0744345699800755e-06,
1671
+ "loss": 1.4598,
1672
+ "step": 233
1673
+ },
1674
+ {
1675
+ "epoch": 0.8572409845449341,
1676
+ "grad_norm": 0.73046875,
1677
+ "learning_rate": 7.045343185856701e-06,
1678
+ "loss": 1.4681,
1679
+ "step": 234
1680
+ },
1681
+ {
1682
+ "epoch": 0.86090440755581,
1683
+ "grad_norm": 0.6953125,
1684
+ "learning_rate": 7.016168407839478e-06,
1685
+ "loss": 1.4952,
1686
+ "step": 235
1687
+ },
1688
+ {
1689
+ "epoch": 0.8645678305666857,
1690
+ "grad_norm": 0.66796875,
1691
+ "learning_rate": 6.986911425459028e-06,
1692
+ "loss": 1.4403,
1693
+ "step": 236
1694
+ },
1695
+ {
1696
+ "epoch": 0.8682312535775615,
1697
+ "grad_norm": 0.73828125,
1698
+ "learning_rate": 6.957573431597646e-06,
1699
+ "loss": 1.4945,
1700
+ "step": 237
1701
+ },
1702
+ {
1703
+ "epoch": 0.8718946765884373,
1704
+ "grad_norm": 0.7578125,
1705
+ "learning_rate": 6.92815562244068e-06,
1706
+ "loss": 1.4798,
1707
+ "step": 238
1708
+ },
1709
+ {
1710
+ "epoch": 0.8755580995993131,
1711
+ "grad_norm": 0.69921875,
1712
+ "learning_rate": 6.898659197427748e-06,
1713
+ "loss": 1.4269,
1714
+ "step": 239
1715
+ },
1716
+ {
1717
+ "epoch": 0.8792215226101889,
1718
+ "grad_norm": 0.703125,
1719
+ "learning_rate": 6.869085359203844e-06,
1720
+ "loss": 1.4421,
1721
+ "step": 240
1722
+ },
1723
+ {
1724
+ "epoch": 0.8828849456210647,
1725
+ "grad_norm": 0.7109375,
1726
+ "learning_rate": 6.839435313570293e-06,
1727
+ "loss": 1.4389,
1728
+ "step": 241
1729
+ },
1730
+ {
1731
+ "epoch": 0.8865483686319404,
1732
+ "grad_norm": 0.70703125,
1733
+ "learning_rate": 6.80971026943559e-06,
1734
+ "loss": 1.4923,
1735
+ "step": 242
1736
+ },
1737
+ {
1738
+ "epoch": 0.8902117916428163,
1739
+ "grad_norm": 0.65234375,
1740
+ "learning_rate": 6.779911438766117e-06,
1741
+ "loss": 1.4152,
1742
+ "step": 243
1743
+ },
1744
+ {
1745
+ "epoch": 0.893875214653692,
1746
+ "grad_norm": 0.6796875,
1747
+ "learning_rate": 6.750040036536718e-06,
1748
+ "loss": 1.446,
1749
+ "step": 244
1750
+ },
1751
+ {
1752
+ "epoch": 0.8975386376645679,
1753
+ "grad_norm": 0.69140625,
1754
+ "learning_rate": 6.72009728068117e-06,
1755
+ "loss": 1.4097,
1756
+ "step": 245
1757
+ },
1758
+ {
1759
+ "epoch": 0.9012020606754436,
1760
+ "grad_norm": 0.6875,
1761
+ "learning_rate": 6.690084392042514e-06,
1762
+ "loss": 1.4293,
1763
+ "step": 246
1764
+ },
1765
+ {
1766
+ "epoch": 0.9048654836863194,
1767
+ "grad_norm": 0.6953125,
1768
+ "learning_rate": 6.6600025943232935e-06,
1769
+ "loss": 1.464,
1770
+ "step": 247
1771
+ },
1772
+ {
1773
+ "epoch": 0.9085289066971952,
1774
+ "grad_norm": 0.7421875,
1775
+ "learning_rate": 6.629853114035643e-06,
1776
+ "loss": 1.4908,
1777
+ "step": 248
1778
+ },
1779
+ {
1780
+ "epoch": 0.912192329708071,
1781
+ "grad_norm": 0.7109375,
1782
+ "learning_rate": 6.599637180451295e-06,
1783
+ "loss": 1.4942,
1784
+ "step": 249
1785
+ },
1786
+ {
1787
+ "epoch": 0.9158557527189468,
1788
+ "grad_norm": 0.734375,
1789
+ "learning_rate": 6.569356025551454e-06,
1790
+ "loss": 1.4677,
1791
+ "step": 250
1792
+ },
1793
+ {
1794
+ "epoch": 0.9195191757298226,
1795
+ "grad_norm": 0.67578125,
1796
+ "learning_rate": 6.539010883976562e-06,
1797
+ "loss": 1.4457,
1798
+ "step": 251
1799
+ },
1800
+ {
1801
+ "epoch": 0.9231825987406983,
1802
+ "grad_norm": 0.703125,
1803
+ "learning_rate": 6.508602992975963e-06,
1804
+ "loss": 1.4622,
1805
+ "step": 252
1806
+ },
1807
+ {
1808
+ "epoch": 0.9268460217515742,
1809
+ "grad_norm": 0.65625,
1810
+ "learning_rate": 6.478133592357455e-06,
1811
+ "loss": 1.4319,
1812
+ "step": 253
1813
+ },
1814
+ {
1815
+ "epoch": 0.9305094447624499,
1816
+ "grad_norm": 0.703125,
1817
+ "learning_rate": 6.447603924436744e-06,
1818
+ "loss": 1.453,
1819
+ "step": 254
1820
+ },
1821
+ {
1822
+ "epoch": 0.9341728677733258,
1823
+ "grad_norm": 0.70703125,
1824
+ "learning_rate": 6.417015233986786e-06,
1825
+ "loss": 1.4239,
1826
+ "step": 255
1827
+ },
1828
+ {
1829
+ "epoch": 0.9378362907842015,
1830
+ "grad_norm": 0.79296875,
1831
+ "learning_rate": 6.38636876818704e-06,
1832
+ "loss": 1.4798,
1833
+ "step": 256
1834
+ },
1835
+ {
1836
+ "epoch": 0.9414997137950772,
1837
+ "grad_norm": 0.79296875,
1838
+ "learning_rate": 6.3556657765726116e-06,
1839
+ "loss": 1.4944,
1840
+ "step": 257
1841
+ },
1842
+ {
1843
+ "epoch": 0.9451631368059531,
1844
+ "grad_norm": 0.7421875,
1845
+ "learning_rate": 6.32490751098331e-06,
1846
+ "loss": 1.4843,
1847
+ "step": 258
1848
+ },
1849
+ {
1850
+ "epoch": 0.9488265598168288,
1851
+ "grad_norm": 0.67578125,
1852
+ "learning_rate": 6.294095225512604e-06,
1853
+ "loss": 1.495,
1854
+ "step": 259
1855
+ },
1856
+ {
1857
+ "epoch": 0.9524899828277046,
1858
+ "grad_norm": 0.68359375,
1859
+ "learning_rate": 6.263230176456497e-06,
1860
+ "loss": 1.4301,
1861
+ "step": 260
1862
+ },
1863
+ {
1864
+ "epoch": 0.9561534058385804,
1865
+ "grad_norm": 0.74609375,
1866
+ "learning_rate": 6.232313622262297e-06,
1867
+ "loss": 1.5314,
1868
+ "step": 261
1869
+ },
1870
+ {
1871
+ "epoch": 0.9598168288494562,
1872
+ "grad_norm": 0.69140625,
1873
+ "learning_rate": 6.2013468234773034e-06,
1874
+ "loss": 1.4744,
1875
+ "step": 262
1876
+ },
1877
+ {
1878
+ "epoch": 0.963480251860332,
1879
+ "grad_norm": 0.67578125,
1880
+ "learning_rate": 6.170331042697425e-06,
1881
+ "loss": 1.4152,
1882
+ "step": 263
1883
+ },
1884
+ {
1885
+ "epoch": 0.9671436748712078,
1886
+ "grad_norm": 0.73828125,
1887
+ "learning_rate": 6.139267544515689e-06,
1888
+ "loss": 1.4507,
1889
+ "step": 264
1890
+ },
1891
+ {
1892
+ "epoch": 0.9708070978820835,
1893
+ "grad_norm": 0.6796875,
1894
+ "learning_rate": 6.10815759547068e-06,
1895
+ "loss": 1.4621,
1896
+ "step": 265
1897
+ },
1898
+ {
1899
+ "epoch": 0.9744705208929594,
1900
+ "grad_norm": 0.6953125,
1901
+ "learning_rate": 6.077002463994908e-06,
1902
+ "loss": 1.4759,
1903
+ "step": 266
1904
+ },
1905
+ {
1906
+ "epoch": 0.9781339439038351,
1907
+ "grad_norm": 0.70703125,
1908
+ "learning_rate": 6.045803420363085e-06,
1909
+ "loss": 1.5047,
1910
+ "step": 267
1911
+ },
1912
+ {
1913
+ "epoch": 0.981797366914711,
1914
+ "grad_norm": 0.76171875,
1915
+ "learning_rate": 6.014561736640334e-06,
1916
+ "loss": 1.446,
1917
+ "step": 268
1918
+ },
1919
+ {
1920
+ "epoch": 0.9854607899255867,
1921
+ "grad_norm": 0.76953125,
1922
+ "learning_rate": 5.983278686630327e-06,
1923
+ "loss": 1.4742,
1924
+ "step": 269
1925
+ },
1926
+ {
1927
+ "epoch": 0.9891242129364625,
1928
+ "grad_norm": 0.703125,
1929
+ "learning_rate": 5.951955545823342e-06,
1930
+ "loss": 1.4837,
1931
+ "step": 270
1932
+ },
1933
+ {
1934
+ "epoch": 0.9927876359473383,
1935
+ "grad_norm": 0.7421875,
1936
+ "learning_rate": 5.920593591344264e-06,
1937
+ "loss": 1.4585,
1938
+ "step": 271
1939
+ },
1940
+ {
1941
+ "epoch": 0.9964510589582141,
1942
+ "grad_norm": 0.67578125,
1943
+ "learning_rate": 5.8891941019005095e-06,
1944
+ "loss": 1.4889,
1945
+ "step": 272
1946
+ },
1947
+ {
1948
+ "epoch": 1.00011448196909,
1949
+ "grad_norm": 0.7265625,
1950
+ "learning_rate": 5.857758357729892e-06,
1951
+ "loss": 1.5012,
1952
+ "step": 273
1953
+ },
1954
+ {
1955
+ "epoch": 1.00343997248022,
1956
+ "grad_norm": 0.6875,
1957
+ "learning_rate": 5.826287640548425e-06,
1958
+ "loss": 1.4676,
1959
+ "step": 274
1960
+ },
1961
+ {
1962
+ "epoch": 1.0071092764591216,
1963
+ "grad_norm": 0.6875,
1964
+ "learning_rate": 5.794783233498062e-06,
1965
+ "loss": 1.4632,
1966
+ "step": 275
1967
+ },
1968
+ {
1969
+ "epoch": 1.0107785804380232,
1970
+ "grad_norm": 0.734375,
1971
+ "learning_rate": 5.763246421094373e-06,
1972
+ "loss": 1.4817,
1973
+ "step": 276
1974
+ },
1975
+ {
1976
+ "epoch": 1.0107785804380232,
1977
+ "eval_loss": 1.4074798822402954,
1978
+ "eval_runtime": 86.3869,
1979
+ "eval_samples_per_second": 8.971,
1980
+ "eval_steps_per_second": 4.491,
1981
+ "step": 276
1982
+ },
1983
+ {
1984
+ "epoch": 1.0144478844169247,
1985
+ "grad_norm": 0.69140625,
1986
+ "learning_rate": 5.731678489174186e-06,
1987
+ "loss": 1.3825,
1988
+ "step": 277
1989
+ },
1990
+ {
1991
+ "epoch": 1.0181171883958262,
1992
+ "grad_norm": 0.75,
1993
+ "learning_rate": 5.7000807248431466e-06,
1994
+ "loss": 1.4202,
1995
+ "step": 278
1996
+ },
1997
+ {
1998
+ "epoch": 1.0217864923747277,
1999
+ "grad_norm": 0.7578125,
2000
+ "learning_rate": 5.668454416423243e-06,
2001
+ "loss": 1.4907,
2002
+ "step": 279
2003
+ },
2004
+ {
2005
+ "epoch": 1.0254557963536293,
2006
+ "grad_norm": 0.70703125,
2007
+ "learning_rate": 5.636800853400285e-06,
2008
+ "loss": 1.4115,
2009
+ "step": 280
2010
+ },
2011
+ {
2012
+ "epoch": 1.0291251003325306,
2013
+ "grad_norm": 0.7421875,
2014
+ "learning_rate": 5.605121326371316e-06,
2015
+ "loss": 1.4316,
2016
+ "step": 281
2017
+ },
2018
+ {
2019
+ "epoch": 1.032794404311432,
2020
+ "grad_norm": 0.69140625,
2021
+ "learning_rate": 5.573417126992004e-06,
2022
+ "loss": 1.4734,
2023
+ "step": 282
2024
+ },
2025
+ {
2026
+ "epoch": 1.0364637082903336,
2027
+ "grad_norm": 0.69140625,
2028
+ "learning_rate": 5.5416895479239665e-06,
2029
+ "loss": 1.4181,
2030
+ "step": 283
2031
+ },
2032
+ {
2033
+ "epoch": 1.0401330122692352,
2034
+ "grad_norm": 0.70703125,
2035
+ "learning_rate": 5.509939882782077e-06,
2036
+ "loss": 1.5014,
2037
+ "step": 284
2038
+ },
2039
+ {
2040
+ "epoch": 1.0438023162481367,
2041
+ "grad_norm": 0.67578125,
2042
+ "learning_rate": 5.478169426081712e-06,
2043
+ "loss": 1.4454,
2044
+ "step": 285
2045
+ },
2046
+ {
2047
+ "epoch": 1.0474716202270382,
2048
+ "grad_norm": 0.71875,
2049
+ "learning_rate": 5.446379473185972e-06,
2050
+ "loss": 1.4678,
2051
+ "step": 286
2052
+ },
2053
+ {
2054
+ "epoch": 1.0511409242059397,
2055
+ "grad_norm": 0.671875,
2056
+ "learning_rate": 5.41457132025287e-06,
2057
+ "loss": 1.4437,
2058
+ "step": 287
2059
+ },
2060
+ {
2061
+ "epoch": 1.0548102281848413,
2062
+ "grad_norm": 0.69140625,
2063
+ "learning_rate": 5.38274626418248e-06,
2064
+ "loss": 1.4636,
2065
+ "step": 288
2066
+ },
2067
+ {
2068
+ "epoch": 1.0584795321637426,
2069
+ "grad_norm": 0.6953125,
2070
+ "learning_rate": 5.3509056025640575e-06,
2071
+ "loss": 1.4414,
2072
+ "step": 289
2073
+ },
2074
+ {
2075
+ "epoch": 1.062148836142644,
2076
+ "grad_norm": 0.75390625,
2077
+ "learning_rate": 5.319050633623141e-06,
2078
+ "loss": 1.5076,
2079
+ "step": 290
2080
+ },
2081
+ {
2082
+ "epoch": 1.0658181401215456,
2083
+ "grad_norm": 0.671875,
2084
+ "learning_rate": 5.287182656168618e-06,
2085
+ "loss": 1.4199,
2086
+ "step": 291
2087
+ },
2088
+ {
2089
+ "epoch": 1.0694874441004472,
2090
+ "grad_norm": 0.7421875,
2091
+ "learning_rate": 5.255302969539753e-06,
2092
+ "loss": 1.4788,
2093
+ "step": 292
2094
+ },
2095
+ {
2096
+ "epoch": 1.0731567480793487,
2097
+ "grad_norm": 0.71875,
2098
+ "learning_rate": 5.22341287355324e-06,
2099
+ "loss": 1.4889,
2100
+ "step": 293
2101
+ },
2102
+ {
2103
+ "epoch": 1.0768260520582502,
2104
+ "grad_norm": 0.6875,
2105
+ "learning_rate": 5.191513668450178e-06,
2106
+ "loss": 1.4079,
2107
+ "step": 294
2108
+ },
2109
+ {
2110
+ "epoch": 1.0804953560371517,
2111
+ "grad_norm": 0.71875,
2112
+ "learning_rate": 5.1596066548430725e-06,
2113
+ "loss": 1.5113,
2114
+ "step": 295
2115
+ },
2116
+ {
2117
+ "epoch": 1.0841646600160533,
2118
+ "grad_norm": 0.6796875,
2119
+ "learning_rate": 5.127693133662801e-06,
2120
+ "loss": 1.4427,
2121
+ "step": 296
2122
+ },
2123
+ {
2124
+ "epoch": 1.0878339639949548,
2125
+ "grad_norm": 0.70703125,
2126
+ "learning_rate": 5.095774406105572e-06,
2127
+ "loss": 1.4381,
2128
+ "step": 297
2129
+ },
2130
+ {
2131
+ "epoch": 1.091503267973856,
2132
+ "grad_norm": 0.6875,
2133
+ "learning_rate": 5.06385177357987e-06,
2134
+ "loss": 1.4975,
2135
+ "step": 298
2136
+ },
2137
+ {
2138
+ "epoch": 1.0951725719527576,
2139
+ "grad_norm": 0.6953125,
2140
+ "learning_rate": 5.031926537653396e-06,
2141
+ "loss": 1.4333,
2142
+ "step": 299
2143
+ },
2144
+ {
2145
+ "epoch": 1.0988418759316592,
2146
+ "grad_norm": 0.66015625,
2147
+ "learning_rate": 5e-06,
2148
+ "loss": 1.4498,
2149
+ "step": 300
2150
+ },
2151
+ {
2152
+ "epoch": 1.1025111799105607,
2153
+ "grad_norm": 0.67578125,
2154
+ "learning_rate": 4.968073462346605e-06,
2155
+ "loss": 1.428,
2156
+ "step": 301
2157
+ },
2158
+ {
2159
+ "epoch": 1.1061804838894622,
2160
+ "grad_norm": 0.69140625,
2161
+ "learning_rate": 4.936148226420133e-06,
2162
+ "loss": 1.5139,
2163
+ "step": 302
2164
+ },
2165
+ {
2166
+ "epoch": 1.1098497878683637,
2167
+ "grad_norm": 0.70703125,
2168
+ "learning_rate": 4.90422559389443e-06,
2169
+ "loss": 1.4781,
2170
+ "step": 303
2171
+ },
2172
+ {
2173
+ "epoch": 1.1135190918472653,
2174
+ "grad_norm": 0.73828125,
2175
+ "learning_rate": 4.8723068663372005e-06,
2176
+ "loss": 1.5284,
2177
+ "step": 304
2178
+ },
2179
+ {
2180
+ "epoch": 1.1171883958261668,
2181
+ "grad_norm": 0.765625,
2182
+ "learning_rate": 4.84039334515693e-06,
2183
+ "loss": 1.4669,
2184
+ "step": 305
2185
+ },
2186
+ {
2187
+ "epoch": 1.120857699805068,
2188
+ "grad_norm": 0.7265625,
2189
+ "learning_rate": 4.808486331549824e-06,
2190
+ "loss": 1.4945,
2191
+ "step": 306
2192
+ },
2193
+ {
2194
+ "epoch": 1.1245270037839696,
2195
+ "grad_norm": 0.67578125,
2196
+ "learning_rate": 4.776587126446761e-06,
2197
+ "loss": 1.5031,
2198
+ "step": 307
2199
+ },
2200
+ {
2201
+ "epoch": 1.1281963077628712,
2202
+ "grad_norm": 0.6875,
2203
+ "learning_rate": 4.744697030460248e-06,
2204
+ "loss": 1.4379,
2205
+ "step": 308
2206
+ },
2207
+ {
2208
+ "epoch": 1.1318656117417727,
2209
+ "grad_norm": 0.68359375,
2210
+ "learning_rate": 4.712817343831384e-06,
2211
+ "loss": 1.4564,
2212
+ "step": 309
2213
+ },
2214
+ {
2215
+ "epoch": 1.1355349157206742,
2216
+ "grad_norm": 0.703125,
2217
+ "learning_rate": 4.680949366376858e-06,
2218
+ "loss": 1.4879,
2219
+ "step": 310
2220
+ },
2221
+ {
2222
+ "epoch": 1.1392042196995757,
2223
+ "grad_norm": 0.6953125,
2224
+ "learning_rate": 4.649094397435944e-06,
2225
+ "loss": 1.5162,
2226
+ "step": 311
2227
+ },
2228
+ {
2229
+ "epoch": 1.1428735236784773,
2230
+ "grad_norm": 0.73828125,
2231
+ "learning_rate": 4.617253735817522e-06,
2232
+ "loss": 1.4739,
2233
+ "step": 312
2234
+ },
2235
+ {
2236
+ "epoch": 1.1465428276573788,
2237
+ "grad_norm": 0.73046875,
2238
+ "learning_rate": 4.585428679747133e-06,
2239
+ "loss": 1.4835,
2240
+ "step": 313
2241
+ },
2242
+ {
2243
+ "epoch": 1.1502121316362803,
2244
+ "grad_norm": 0.71484375,
2245
+ "learning_rate": 4.553620526814029e-06,
2246
+ "loss": 1.414,
2247
+ "step": 314
2248
+ },
2249
+ {
2250
+ "epoch": 1.1538814356151819,
2251
+ "grad_norm": 0.734375,
2252
+ "learning_rate": 4.521830573918289e-06,
2253
+ "loss": 1.4474,
2254
+ "step": 315
2255
+ },
2256
+ {
2257
+ "epoch": 1.1575507395940832,
2258
+ "grad_norm": 0.69140625,
2259
+ "learning_rate": 4.490060117217925e-06,
2260
+ "loss": 1.428,
2261
+ "step": 316
2262
+ },
2263
+ {
2264
+ "epoch": 1.1612200435729847,
2265
+ "grad_norm": 0.66796875,
2266
+ "learning_rate": 4.458310452076034e-06,
2267
+ "loss": 1.4305,
2268
+ "step": 317
2269
+ },
2270
+ {
2271
+ "epoch": 1.1648893475518862,
2272
+ "grad_norm": 0.70703125,
2273
+ "learning_rate": 4.426582873007999e-06,
2274
+ "loss": 1.4561,
2275
+ "step": 318
2276
+ },
2277
+ {
2278
+ "epoch": 1.1685586515307878,
2279
+ "grad_norm": 0.71484375,
2280
+ "learning_rate": 4.3948786736286866e-06,
2281
+ "loss": 1.4406,
2282
+ "step": 319
2283
+ },
2284
+ {
2285
+ "epoch": 1.1722279555096893,
2286
+ "grad_norm": 0.671875,
2287
+ "learning_rate": 4.363199146599717e-06,
2288
+ "loss": 1.4912,
2289
+ "step": 320
2290
+ },
2291
+ {
2292
+ "epoch": 1.1758972594885908,
2293
+ "grad_norm": 0.68359375,
2294
+ "learning_rate": 4.331545583576758e-06,
2295
+ "loss": 1.4481,
2296
+ "step": 321
2297
+ },
2298
+ {
2299
+ "epoch": 1.1795665634674923,
2300
+ "grad_norm": 0.7421875,
2301
+ "learning_rate": 4.299919275156857e-06,
2302
+ "loss": 1.4796,
2303
+ "step": 322
2304
+ },
2305
+ {
2306
+ "epoch": 1.1832358674463936,
2307
+ "grad_norm": 0.6796875,
2308
+ "learning_rate": 4.2683215108258145e-06,
2309
+ "loss": 1.5001,
2310
+ "step": 323
2311
+ },
2312
+ {
2313
+ "epoch": 1.1869051714252952,
2314
+ "grad_norm": 0.67578125,
2315
+ "learning_rate": 4.236753578905627e-06,
2316
+ "loss": 1.4334,
2317
+ "step": 324
2318
+ },
2319
+ {
2320
+ "epoch": 1.1905744754041967,
2321
+ "grad_norm": 0.6875,
2322
+ "learning_rate": 4.205216766501941e-06,
2323
+ "loss": 1.5136,
2324
+ "step": 325
2325
+ },
2326
+ {
2327
+ "epoch": 1.1942437793830982,
2328
+ "grad_norm": 0.6796875,
2329
+ "learning_rate": 4.173712359451576e-06,
2330
+ "loss": 1.5047,
2331
+ "step": 326
2332
+ },
2333
+ {
2334
+ "epoch": 1.1979130833619998,
2335
+ "grad_norm": 0.6484375,
2336
+ "learning_rate": 4.142241642270109e-06,
2337
+ "loss": 1.4266,
2338
+ "step": 327
2339
+ },
2340
+ {
2341
+ "epoch": 1.2015823873409013,
2342
+ "grad_norm": 0.71484375,
2343
+ "learning_rate": 4.110805898099492e-06,
2344
+ "loss": 1.5102,
2345
+ "step": 328
2346
+ },
2347
+ {
2348
+ "epoch": 1.2052516913198028,
2349
+ "grad_norm": 0.70703125,
2350
+ "learning_rate": 4.079406408655737e-06,
2351
+ "loss": 1.4405,
2352
+ "step": 329
2353
+ },
2354
+ {
2355
+ "epoch": 1.2089209952987043,
2356
+ "grad_norm": 0.70703125,
2357
+ "learning_rate": 4.048044454176658e-06,
2358
+ "loss": 1.4366,
2359
+ "step": 330
2360
+ },
2361
+ {
2362
+ "epoch": 1.2125902992776059,
2363
+ "grad_norm": 0.74609375,
2364
+ "learning_rate": 4.016721313369674e-06,
2365
+ "loss": 1.4686,
2366
+ "step": 331
2367
+ },
2368
+ {
2369
+ "epoch": 1.2162596032565074,
2370
+ "grad_norm": 0.7109375,
2371
+ "learning_rate": 3.985438263359667e-06,
2372
+ "loss": 1.4541,
2373
+ "step": 332
2374
+ },
2375
+ {
2376
+ "epoch": 1.2199289072354087,
2377
+ "grad_norm": 0.6875,
2378
+ "learning_rate": 3.954196579636918e-06,
2379
+ "loss": 1.4657,
2380
+ "step": 333
2381
+ },
2382
+ {
2383
+ "epoch": 1.2235982112143102,
2384
+ "grad_norm": 0.734375,
2385
+ "learning_rate": 3.922997536005094e-06,
2386
+ "loss": 1.3919,
2387
+ "step": 334
2388
+ },
2389
+ {
2390
+ "epoch": 1.2272675151932118,
2391
+ "grad_norm": 0.71484375,
2392
+ "learning_rate": 3.891842404529321e-06,
2393
+ "loss": 1.4672,
2394
+ "step": 335
2395
+ },
2396
+ {
2397
+ "epoch": 1.2309368191721133,
2398
+ "grad_norm": 0.69140625,
2399
+ "learning_rate": 3.860732455484314e-06,
2400
+ "loss": 1.4108,
2401
+ "step": 336
2402
+ },
2403
+ {
2404
+ "epoch": 1.2346061231510148,
2405
+ "grad_norm": 0.69921875,
2406
+ "learning_rate": 3.829668957302576e-06,
2407
+ "loss": 1.4663,
2408
+ "step": 337
2409
+ },
2410
+ {
2411
+ "epoch": 1.2382754271299163,
2412
+ "grad_norm": 0.671875,
2413
+ "learning_rate": 3.7986531765226965e-06,
2414
+ "loss": 1.4127,
2415
+ "step": 338
2416
+ },
2417
+ {
2418
+ "epoch": 1.2419447311088179,
2419
+ "grad_norm": 0.71875,
2420
+ "learning_rate": 3.7676863777377055e-06,
2421
+ "loss": 1.4151,
2422
+ "step": 339
2423
+ },
2424
+ {
2425
+ "epoch": 1.2456140350877192,
2426
+ "grad_norm": 0.765625,
2427
+ "learning_rate": 3.7367698235435036e-06,
2428
+ "loss": 1.4531,
2429
+ "step": 340
2430
+ },
2431
+ {
2432
+ "epoch": 1.2492833390666207,
2433
+ "grad_norm": 0.6953125,
2434
+ "learning_rate": 3.705904774487396e-06,
2435
+ "loss": 1.4463,
2436
+ "step": 341
2437
+ },
2438
+ {
2439
+ "epoch": 1.2529526430455222,
2440
+ "grad_norm": 0.68359375,
2441
+ "learning_rate": 3.675092489016693e-06,
2442
+ "loss": 1.4502,
2443
+ "step": 342
2444
+ },
2445
+ {
2446
+ "epoch": 1.2566219470244238,
2447
+ "grad_norm": 0.703125,
2448
+ "learning_rate": 3.6443342234273905e-06,
2449
+ "loss": 1.4793,
2450
+ "step": 343
2451
+ },
2452
+ {
2453
+ "epoch": 1.2602912510033253,
2454
+ "grad_norm": 0.75390625,
2455
+ "learning_rate": 3.61363123181296e-06,
2456
+ "loss": 1.5136,
2457
+ "step": 344
2458
+ },
2459
+ {
2460
+ "epoch": 1.2639605549822268,
2461
+ "grad_norm": 0.6875,
2462
+ "learning_rate": 3.582984766013215e-06,
2463
+ "loss": 1.4637,
2464
+ "step": 345
2465
+ },
2466
+ {
2467
+ "epoch": 1.2639605549822268,
2468
+ "eval_loss": 1.4037530422210693,
2469
+ "eval_runtime": 86.5383,
2470
+ "eval_samples_per_second": 8.956,
2471
+ "eval_steps_per_second": 4.484,
2472
+ "step": 345
2473
+ },
2474
+ {
2475
+ "epoch": 1.2676298589611283,
2476
+ "grad_norm": 0.6875,
2477
+ "learning_rate": 3.5523960755632573e-06,
2478
+ "loss": 1.5192,
2479
+ "step": 346
2480
+ },
2481
+ {
2482
+ "epoch": 1.2712991629400299,
2483
+ "grad_norm": 0.6875,
2484
+ "learning_rate": 3.5218664076425455e-06,
2485
+ "loss": 1.4695,
2486
+ "step": 347
2487
+ },
2488
+ {
2489
+ "epoch": 1.2749684669189314,
2490
+ "grad_norm": 0.6953125,
2491
+ "learning_rate": 3.4913970070240388e-06,
2492
+ "loss": 1.4707,
2493
+ "step": 348
2494
+ },
2495
+ {
2496
+ "epoch": 1.278637770897833,
2497
+ "grad_norm": 0.6640625,
2498
+ "learning_rate": 3.460989116023439e-06,
2499
+ "loss": 1.4646,
2500
+ "step": 349
2501
+ },
2502
+ {
2503
+ "epoch": 1.2823070748767345,
2504
+ "grad_norm": 0.734375,
2505
+ "learning_rate": 3.4306439744485453e-06,
2506
+ "loss": 1.4668,
2507
+ "step": 350
2508
+ },
2509
+ {
2510
+ "epoch": 1.2859763788556358,
2511
+ "grad_norm": 0.6875,
2512
+ "learning_rate": 3.400362819548706e-06,
2513
+ "loss": 1.4729,
2514
+ "step": 351
2515
+ },
2516
+ {
2517
+ "epoch": 1.2896456828345373,
2518
+ "grad_norm": 0.6796875,
2519
+ "learning_rate": 3.3701468859643583e-06,
2520
+ "loss": 1.4596,
2521
+ "step": 352
2522
+ },
2523
+ {
2524
+ "epoch": 1.2933149868134388,
2525
+ "grad_norm": 0.72265625,
2526
+ "learning_rate": 3.3399974056767095e-06,
2527
+ "loss": 1.4346,
2528
+ "step": 353
2529
+ },
2530
+ {
2531
+ "epoch": 1.2969842907923403,
2532
+ "grad_norm": 0.69921875,
2533
+ "learning_rate": 3.309915607957487e-06,
2534
+ "loss": 1.432,
2535
+ "step": 354
2536
+ },
2537
+ {
2538
+ "epoch": 1.3006535947712419,
2539
+ "grad_norm": 0.68359375,
2540
+ "learning_rate": 3.2799027193188316e-06,
2541
+ "loss": 1.3905,
2542
+ "step": 355
2543
+ },
2544
+ {
2545
+ "epoch": 1.3043228987501434,
2546
+ "grad_norm": 0.703125,
2547
+ "learning_rate": 3.249959963463283e-06,
2548
+ "loss": 1.4609,
2549
+ "step": 356
2550
+ },
2551
+ {
2552
+ "epoch": 1.3079922027290447,
2553
+ "grad_norm": 0.6796875,
2554
+ "learning_rate": 3.2200885612338846e-06,
2555
+ "loss": 1.4484,
2556
+ "step": 357
2557
+ },
2558
+ {
2559
+ "epoch": 1.3116615067079462,
2560
+ "grad_norm": 0.6796875,
2561
+ "learning_rate": 3.19028973056441e-06,
2562
+ "loss": 1.4393,
2563
+ "step": 358
2564
+ },
2565
+ {
2566
+ "epoch": 1.3153308106868478,
2567
+ "grad_norm": 0.71875,
2568
+ "learning_rate": 3.160564686429709e-06,
2569
+ "loss": 1.4205,
2570
+ "step": 359
2571
+ },
2572
+ {
2573
+ "epoch": 1.3190001146657493,
2574
+ "grad_norm": 0.65234375,
2575
+ "learning_rate": 3.1309146407961565e-06,
2576
+ "loss": 1.4736,
2577
+ "step": 360
2578
+ },
2579
+ {
2580
+ "epoch": 1.3226694186446508,
2581
+ "grad_norm": 0.6875,
2582
+ "learning_rate": 3.1013408025722515e-06,
2583
+ "loss": 1.4501,
2584
+ "step": 361
2585
+ },
2586
+ {
2587
+ "epoch": 1.3263387226235523,
2588
+ "grad_norm": 0.640625,
2589
+ "learning_rate": 3.0718443775593233e-06,
2590
+ "loss": 1.4038,
2591
+ "step": 362
2592
+ },
2593
+ {
2594
+ "epoch": 1.3300080266024539,
2595
+ "grad_norm": 0.72265625,
2596
+ "learning_rate": 3.0424265684023556e-06,
2597
+ "loss": 1.4786,
2598
+ "step": 363
2599
+ },
2600
+ {
2601
+ "epoch": 1.3336773305813554,
2602
+ "grad_norm": 0.71875,
2603
+ "learning_rate": 3.0130885745409744e-06,
2604
+ "loss": 1.4944,
2605
+ "step": 364
2606
+ },
2607
+ {
2608
+ "epoch": 1.337346634560257,
2609
+ "grad_norm": 0.72265625,
2610
+ "learning_rate": 2.9838315921605237e-06,
2611
+ "loss": 1.4274,
2612
+ "step": 365
2613
+ },
2614
+ {
2615
+ "epoch": 1.3410159385391585,
2616
+ "grad_norm": 0.71875,
2617
+ "learning_rate": 2.9546568141433007e-06,
2618
+ "loss": 1.4483,
2619
+ "step": 366
2620
+ },
2621
+ {
2622
+ "epoch": 1.34468524251806,
2623
+ "grad_norm": 0.6953125,
2624
+ "learning_rate": 2.9255654300199253e-06,
2625
+ "loss": 1.4838,
2626
+ "step": 367
2627
+ },
2628
+ {
2629
+ "epoch": 1.3483545464969613,
2630
+ "grad_norm": 0.66796875,
2631
+ "learning_rate": 2.8965586259208295e-06,
2632
+ "loss": 1.4264,
2633
+ "step": 368
2634
+ },
2635
+ {
2636
+ "epoch": 1.3520238504758628,
2637
+ "grad_norm": 0.69140625,
2638
+ "learning_rate": 2.8676375845279013e-06,
2639
+ "loss": 1.4688,
2640
+ "step": 369
2641
+ },
2642
+ {
2643
+ "epoch": 1.3556931544547643,
2644
+ "grad_norm": 0.71484375,
2645
+ "learning_rate": 2.838803485026265e-06,
2646
+ "loss": 1.4343,
2647
+ "step": 370
2648
+ },
2649
+ {
2650
+ "epoch": 1.3593624584336659,
2651
+ "grad_norm": 0.671875,
2652
+ "learning_rate": 2.810057503056198e-06,
2653
+ "loss": 1.4511,
2654
+ "step": 371
2655
+ },
2656
+ {
2657
+ "epoch": 1.3630317624125674,
2658
+ "grad_norm": 0.67578125,
2659
+ "learning_rate": 2.781400810665201e-06,
2660
+ "loss": 1.4483,
2661
+ "step": 372
2662
+ },
2663
+ {
2664
+ "epoch": 1.366701066391469,
2665
+ "grad_norm": 0.73046875,
2666
+ "learning_rate": 2.7528345762602125e-06,
2667
+ "loss": 1.4168,
2668
+ "step": 373
2669
+ },
2670
+ {
2671
+ "epoch": 1.3703703703703702,
2672
+ "grad_norm": 0.74609375,
2673
+ "learning_rate": 2.724359964559958e-06,
2674
+ "loss": 1.4603,
2675
+ "step": 374
2676
+ },
2677
+ {
2678
+ "epoch": 1.3740396743492718,
2679
+ "grad_norm": 0.69140625,
2680
+ "learning_rate": 2.695978136547476e-06,
2681
+ "loss": 1.5096,
2682
+ "step": 375
2683
+ },
2684
+ {
2685
+ "epoch": 1.3777089783281733,
2686
+ "grad_norm": 0.6875,
2687
+ "learning_rate": 2.6676902494227795e-06,
2688
+ "loss": 1.4904,
2689
+ "step": 376
2690
+ },
2691
+ {
2692
+ "epoch": 1.3813782823070748,
2693
+ "grad_norm": 0.69140625,
2694
+ "learning_rate": 2.639497456555663e-06,
2695
+ "loss": 1.4747,
2696
+ "step": 377
2697
+ },
2698
+ {
2699
+ "epoch": 1.3850475862859764,
2700
+ "grad_norm": 0.7109375,
2701
+ "learning_rate": 2.611400907438685e-06,
2702
+ "loss": 1.4133,
2703
+ "step": 378
2704
+ },
2705
+ {
2706
+ "epoch": 1.3887168902648779,
2707
+ "grad_norm": 0.69140625,
2708
+ "learning_rate": 2.583401747640307e-06,
2709
+ "loss": 1.5062,
2710
+ "step": 379
2711
+ },
2712
+ {
2713
+ "epoch": 1.3923861942437794,
2714
+ "grad_norm": 0.7109375,
2715
+ "learning_rate": 2.555501118758167e-06,
2716
+ "loss": 1.4598,
2717
+ "step": 380
2718
+ },
2719
+ {
2720
+ "epoch": 1.396055498222681,
2721
+ "grad_norm": 0.703125,
2722
+ "learning_rate": 2.527700158372548e-06,
2723
+ "loss": 1.5289,
2724
+ "step": 381
2725
+ },
2726
+ {
2727
+ "epoch": 1.3997248022015825,
2728
+ "grad_norm": 0.7578125,
2729
+ "learning_rate": 2.5000000000000015e-06,
2730
+ "loss": 1.4476,
2731
+ "step": 382
2732
+ },
2733
+ {
2734
+ "epoch": 1.403394106180484,
2735
+ "grad_norm": 0.6796875,
2736
+ "learning_rate": 2.472401773047107e-06,
2737
+ "loss": 1.4792,
2738
+ "step": 383
2739
+ },
2740
+ {
2741
+ "epoch": 1.4070634101593855,
2742
+ "grad_norm": 0.67578125,
2743
+ "learning_rate": 2.4449066027644473e-06,
2744
+ "loss": 1.4892,
2745
+ "step": 384
2746
+ },
2747
+ {
2748
+ "epoch": 1.4107327141382868,
2749
+ "grad_norm": 0.6796875,
2750
+ "learning_rate": 2.4175156102007237e-06,
2751
+ "loss": 1.4466,
2752
+ "step": 385
2753
+ },
2754
+ {
2755
+ "epoch": 1.4144020181171884,
2756
+ "grad_norm": 0.6484375,
2757
+ "learning_rate": 2.3902299121570332e-06,
2758
+ "loss": 1.4712,
2759
+ "step": 386
2760
+ },
2761
+ {
2762
+ "epoch": 1.4180713220960899,
2763
+ "grad_norm": 0.69140625,
2764
+ "learning_rate": 2.363050621141354e-06,
2765
+ "loss": 1.4384,
2766
+ "step": 387
2767
+ },
2768
+ {
2769
+ "epoch": 1.4217406260749914,
2770
+ "grad_norm": 0.69140625,
2771
+ "learning_rate": 2.3359788453231723e-06,
2772
+ "loss": 1.4767,
2773
+ "step": 388
2774
+ },
2775
+ {
2776
+ "epoch": 1.425409930053893,
2777
+ "grad_norm": 0.70703125,
2778
+ "learning_rate": 2.309015688488305e-06,
2779
+ "loss": 1.4331,
2780
+ "step": 389
2781
+ },
2782
+ {
2783
+ "epoch": 1.4290792340327945,
2784
+ "grad_norm": 0.734375,
2785
+ "learning_rate": 2.282162249993895e-06,
2786
+ "loss": 1.4562,
2787
+ "step": 390
2788
+ },
2789
+ {
2790
+ "epoch": 1.4327485380116958,
2791
+ "grad_norm": 0.6875,
2792
+ "learning_rate": 2.2554196247235843e-06,
2793
+ "loss": 1.4585,
2794
+ "step": 391
2795
+ },
2796
+ {
2797
+ "epoch": 1.4364178419905973,
2798
+ "grad_norm": 0.72265625,
2799
+ "learning_rate": 2.228788903042877e-06,
2800
+ "loss": 1.4396,
2801
+ "step": 392
2802
+ },
2803
+ {
2804
+ "epoch": 1.4400871459694988,
2805
+ "grad_norm": 0.703125,
2806
+ "learning_rate": 2.20227117075468e-06,
2807
+ "loss": 1.4184,
2808
+ "step": 393
2809
+ },
2810
+ {
2811
+ "epoch": 1.4437564499484004,
2812
+ "grad_norm": 0.67578125,
2813
+ "learning_rate": 2.175867509055033e-06,
2814
+ "loss": 1.4734,
2815
+ "step": 394
2816
+ },
2817
+ {
2818
+ "epoch": 1.4474257539273019,
2819
+ "grad_norm": 0.703125,
2820
+ "learning_rate": 2.149578994489024e-06,
2821
+ "loss": 1.4194,
2822
+ "step": 395
2823
+ },
2824
+ {
2825
+ "epoch": 1.4510950579062034,
2826
+ "grad_norm": 0.69921875,
2827
+ "learning_rate": 2.1234066989068972e-06,
2828
+ "loss": 1.4408,
2829
+ "step": 396
2830
+ },
2831
+ {
2832
+ "epoch": 1.454764361885105,
2833
+ "grad_norm": 0.6484375,
2834
+ "learning_rate": 2.0973516894203507e-06,
2835
+ "loss": 1.4546,
2836
+ "step": 397
2837
+ },
2838
+ {
2839
+ "epoch": 1.4584336658640065,
2840
+ "grad_norm": 0.6875,
2841
+ "learning_rate": 2.071415028359026e-06,
2842
+ "loss": 1.4392,
2843
+ "step": 398
2844
+ },
2845
+ {
2846
+ "epoch": 1.462102969842908,
2847
+ "grad_norm": 0.7109375,
2848
+ "learning_rate": 2.045597773227199e-06,
2849
+ "loss": 1.4277,
2850
+ "step": 399
2851
+ },
2852
+ {
2853
+ "epoch": 1.4657722738218095,
2854
+ "grad_norm": 0.68359375,
2855
+ "learning_rate": 2.019900976660651e-06,
2856
+ "loss": 1.4296,
2857
+ "step": 400
2858
+ },
2859
+ {
2860
+ "epoch": 1.469441577800711,
2861
+ "grad_norm": 0.73046875,
2862
+ "learning_rate": 1.994325686383769e-06,
2863
+ "loss": 1.5095,
2864
+ "step": 401
2865
+ },
2866
+ {
2867
+ "epoch": 1.4731108817796124,
2868
+ "grad_norm": 0.69140625,
2869
+ "learning_rate": 1.9688729451668116e-06,
2870
+ "loss": 1.4765,
2871
+ "step": 402
2872
+ },
2873
+ {
2874
+ "epoch": 1.4767801857585139,
2875
+ "grad_norm": 0.703125,
2876
+ "learning_rate": 1.943543790783392e-06,
2877
+ "loss": 1.5502,
2878
+ "step": 403
2879
+ },
2880
+ {
2881
+ "epoch": 1.4804494897374154,
2882
+ "grad_norm": 0.6640625,
2883
+ "learning_rate": 1.9183392559681812e-06,
2884
+ "loss": 1.5307,
2885
+ "step": 404
2886
+ },
2887
+ {
2888
+ "epoch": 1.484118793716317,
2889
+ "grad_norm": 0.7109375,
2890
+ "learning_rate": 1.8932603683747858e-06,
2891
+ "loss": 1.4203,
2892
+ "step": 405
2893
+ },
2894
+ {
2895
+ "epoch": 1.4877880976952185,
2896
+ "grad_norm": 0.703125,
2897
+ "learning_rate": 1.8683081505338468e-06,
2898
+ "loss": 1.4446,
2899
+ "step": 406
2900
+ },
2901
+ {
2902
+ "epoch": 1.49145740167412,
2903
+ "grad_norm": 0.69921875,
2904
+ "learning_rate": 1.8434836198113642e-06,
2905
+ "loss": 1.4736,
2906
+ "step": 407
2907
+ },
2908
+ {
2909
+ "epoch": 1.4951267056530213,
2910
+ "grad_norm": 0.671875,
2911
+ "learning_rate": 1.8187877883672024e-06,
2912
+ "loss": 1.4205,
2913
+ "step": 408
2914
+ },
2915
+ {
2916
+ "epoch": 1.4987960096319228,
2917
+ "grad_norm": 0.69140625,
2918
+ "learning_rate": 1.7942216631138188e-06,
2919
+ "loss": 1.4663,
2920
+ "step": 409
2921
+ },
2922
+ {
2923
+ "epoch": 1.5024653136108244,
2924
+ "grad_norm": 0.7109375,
2925
+ "learning_rate": 1.7697862456752273e-06,
2926
+ "loss": 1.4042,
2927
+ "step": 410
2928
+ },
2929
+ {
2930
+ "epoch": 1.5061346175897259,
2931
+ "grad_norm": 0.7265625,
2932
+ "learning_rate": 1.745482532346145e-06,
2933
+ "loss": 1.4556,
2934
+ "step": 411
2935
+ },
2936
+ {
2937
+ "epoch": 1.5098039215686274,
2938
+ "grad_norm": 0.71875,
2939
+ "learning_rate": 1.7213115140513687e-06,
2940
+ "loss": 1.477,
2941
+ "step": 412
2942
+ },
2943
+ {
2944
+ "epoch": 1.513473225547529,
2945
+ "grad_norm": 0.64453125,
2946
+ "learning_rate": 1.6972741763053835e-06,
2947
+ "loss": 1.4345,
2948
+ "step": 413
2949
+ },
2950
+ {
2951
+ "epoch": 1.5171425295264305,
2952
+ "grad_norm": 0.70703125,
2953
+ "learning_rate": 1.6733714991721738e-06,
2954
+ "loss": 1.4701,
2955
+ "step": 414
2956
+ },
2957
+ {
2958
+ "epoch": 1.5171425295264305,
2959
+ "eval_loss": 1.4025633335113525,
2960
+ "eval_runtime": 86.4313,
2961
+ "eval_samples_per_second": 8.967,
2962
+ "eval_steps_per_second": 4.489,
2963
+ "step": 414
2964
+ },
2965
+ {
2966
+ "epoch": 1.520811833505332,
2967
+ "grad_norm": 0.71484375,
2968
+ "learning_rate": 1.649604457225266e-06,
2969
+ "loss": 1.4382,
2970
+ "step": 415
2971
+ },
2972
+ {
2973
+ "epoch": 1.5244811374842335,
2974
+ "grad_norm": 0.66796875,
2975
+ "learning_rate": 1.6259740195079903e-06,
2976
+ "loss": 1.4168,
2977
+ "step": 416
2978
+ },
2979
+ {
2980
+ "epoch": 1.528150441463135,
2981
+ "grad_norm": 0.67578125,
2982
+ "learning_rate": 1.6024811494939723e-06,
2983
+ "loss": 1.4085,
2984
+ "step": 417
2985
+ },
2986
+ {
2987
+ "epoch": 1.5318197454420366,
2988
+ "grad_norm": 0.67578125,
2989
+ "learning_rate": 1.5791268050478487e-06,
2990
+ "loss": 1.3977,
2991
+ "step": 418
2992
+ },
2993
+ {
2994
+ "epoch": 1.5354890494209381,
2995
+ "grad_norm": 0.71484375,
2996
+ "learning_rate": 1.555911938386213e-06,
2997
+ "loss": 1.472,
2998
+ "step": 419
2999
+ },
3000
+ {
3001
+ "epoch": 1.5391583533998394,
3002
+ "grad_norm": 0.6953125,
3003
+ "learning_rate": 1.532837496038792e-06,
3004
+ "loss": 1.4904,
3005
+ "step": 420
3006
+ },
3007
+ {
3008
+ "epoch": 1.542827657378741,
3009
+ "grad_norm": 0.65625,
3010
+ "learning_rate": 1.509904418809852e-06,
3011
+ "loss": 1.4428,
3012
+ "step": 421
3013
+ },
3014
+ {
3015
+ "epoch": 1.5464969613576425,
3016
+ "grad_norm": 0.6875,
3017
+ "learning_rate": 1.4871136417398407e-06,
3018
+ "loss": 1.4436,
3019
+ "step": 422
3020
+ },
3021
+ {
3022
+ "epoch": 1.550166265336544,
3023
+ "grad_norm": 0.72265625,
3024
+ "learning_rate": 1.4644660940672628e-06,
3025
+ "loss": 1.4498,
3026
+ "step": 423
3027
+ },
3028
+ {
3029
+ "epoch": 1.5538355693154455,
3030
+ "grad_norm": 0.703125,
3031
+ "learning_rate": 1.4419626991907925e-06,
3032
+ "loss": 1.4447,
3033
+ "step": 424
3034
+ },
3035
+ {
3036
+ "epoch": 1.5575048732943468,
3037
+ "grad_norm": 0.6796875,
3038
+ "learning_rate": 1.4196043746316252e-06,
3039
+ "loss": 1.4668,
3040
+ "step": 425
3041
+ },
3042
+ {
3043
+ "epoch": 1.5611741772732484,
3044
+ "grad_norm": 0.71875,
3045
+ "learning_rate": 1.3973920319960654e-06,
3046
+ "loss": 1.4628,
3047
+ "step": 426
3048
+ },
3049
+ {
3050
+ "epoch": 1.56484348125215,
3051
+ "grad_norm": 0.6796875,
3052
+ "learning_rate": 1.3753265769383605e-06,
3053
+ "loss": 1.4474,
3054
+ "step": 427
3055
+ },
3056
+ {
3057
+ "epoch": 1.5685127852310514,
3058
+ "grad_norm": 0.7421875,
3059
+ "learning_rate": 1.3534089091237757e-06,
3060
+ "loss": 1.5121,
3061
+ "step": 428
3062
+ },
3063
+ {
3064
+ "epoch": 1.572182089209953,
3065
+ "grad_norm": 0.7421875,
3066
+ "learning_rate": 1.3316399221919075e-06,
3067
+ "loss": 1.4867,
3068
+ "step": 429
3069
+ },
3070
+ {
3071
+ "epoch": 1.5758513931888545,
3072
+ "grad_norm": 0.6640625,
3073
+ "learning_rate": 1.310020503720254e-06,
3074
+ "loss": 1.4734,
3075
+ "step": 430
3076
+ },
3077
+ {
3078
+ "epoch": 1.579520697167756,
3079
+ "grad_norm": 0.6953125,
3080
+ "learning_rate": 1.2885515351880217e-06,
3081
+ "loss": 1.4975,
3082
+ "step": 431
3083
+ },
3084
+ {
3085
+ "epoch": 1.5831900011466575,
3086
+ "grad_norm": 0.71484375,
3087
+ "learning_rate": 1.2672338919401866e-06,
3088
+ "loss": 1.4988,
3089
+ "step": 432
3090
+ },
3091
+ {
3092
+ "epoch": 1.586859305125559,
3093
+ "grad_norm": 0.6640625,
3094
+ "learning_rate": 1.2460684431518055e-06,
3095
+ "loss": 1.4176,
3096
+ "step": 433
3097
+ },
3098
+ {
3099
+ "epoch": 1.5905286091044606,
3100
+ "grad_norm": 0.671875,
3101
+ "learning_rate": 1.2250560517925747e-06,
3102
+ "loss": 1.485,
3103
+ "step": 434
3104
+ },
3105
+ {
3106
+ "epoch": 1.5941979130833621,
3107
+ "grad_norm": 0.7265625,
3108
+ "learning_rate": 1.2041975745916474e-06,
3109
+ "loss": 1.5118,
3110
+ "step": 435
3111
+ },
3112
+ {
3113
+ "epoch": 1.5978672170622636,
3114
+ "grad_norm": 0.68359375,
3115
+ "learning_rate": 1.183493862002702e-06,
3116
+ "loss": 1.4962,
3117
+ "step": 436
3118
+ },
3119
+ {
3120
+ "epoch": 1.601536521041165,
3121
+ "grad_norm": 0.71875,
3122
+ "learning_rate": 1.1629457581692616e-06,
3123
+ "loss": 1.4537,
3124
+ "step": 437
3125
+ },
3126
+ {
3127
+ "epoch": 1.6052058250200665,
3128
+ "grad_norm": 0.6953125,
3129
+ "learning_rate": 1.1425541008902852e-06,
3130
+ "loss": 1.4817,
3131
+ "step": 438
3132
+ },
3133
+ {
3134
+ "epoch": 1.608875128998968,
3135
+ "grad_norm": 0.7109375,
3136
+ "learning_rate": 1.1223197215860065e-06,
3137
+ "loss": 1.4836,
3138
+ "step": 439
3139
+ },
3140
+ {
3141
+ "epoch": 1.6125444329778695,
3142
+ "grad_norm": 0.65234375,
3143
+ "learning_rate": 1.1022434452640252e-06,
3144
+ "loss": 1.4107,
3145
+ "step": 440
3146
+ },
3147
+ {
3148
+ "epoch": 1.616213736956771,
3149
+ "grad_norm": 0.671875,
3150
+ "learning_rate": 1.0823260904856791e-06,
3151
+ "loss": 1.4774,
3152
+ "step": 441
3153
+ },
3154
+ {
3155
+ "epoch": 1.6198830409356724,
3156
+ "grad_norm": 0.671875,
3157
+ "learning_rate": 1.0625684693326727e-06,
3158
+ "loss": 1.4588,
3159
+ "step": 442
3160
+ },
3161
+ {
3162
+ "epoch": 1.623552344914574,
3163
+ "grad_norm": 0.6875,
3164
+ "learning_rate": 1.0429713873739505e-06,
3165
+ "loss": 1.4889,
3166
+ "step": 443
3167
+ },
3168
+ {
3169
+ "epoch": 1.6272216488934754,
3170
+ "grad_norm": 0.68359375,
3171
+ "learning_rate": 1.0235356436328675e-06,
3172
+ "loss": 1.4724,
3173
+ "step": 444
3174
+ },
3175
+ {
3176
+ "epoch": 1.630890952872377,
3177
+ "grad_norm": 0.671875,
3178
+ "learning_rate": 1.0042620305546069e-06,
3179
+ "loss": 1.4209,
3180
+ "step": 445
3181
+ },
3182
+ {
3183
+ "epoch": 1.6345602568512785,
3184
+ "grad_norm": 0.70703125,
3185
+ "learning_rate": 9.851513339738627e-07,
3186
+ "loss": 1.4596,
3187
+ "step": 446
3188
+ },
3189
+ {
3190
+ "epoch": 1.63822956083018,
3191
+ "grad_norm": 0.77734375,
3192
+ "learning_rate": 9.662043330828086e-07,
3193
+ "loss": 1.4493,
3194
+ "step": 447
3195
+ },
3196
+ {
3197
+ "epoch": 1.6418988648090815,
3198
+ "grad_norm": 0.6640625,
3199
+ "learning_rate": 9.474218003993275e-07,
3200
+ "loss": 1.4586,
3201
+ "step": 448
3202
+ },
3203
+ {
3204
+ "epoch": 1.645568168787983,
3205
+ "grad_norm": 0.6875,
3206
+ "learning_rate": 9.288045017355035e-07,
3207
+ "loss": 1.4519,
3208
+ "step": 449
3209
+ },
3210
+ {
3211
+ "epoch": 1.6492374727668846,
3212
+ "grad_norm": 0.6796875,
3213
+ "learning_rate": 9.10353196166412e-07,
3214
+ "loss": 1.4053,
3215
+ "step": 450
3216
+ },
3217
+ {
3218
+ "epoch": 1.6529067767457861,
3219
+ "grad_norm": 0.66796875,
3220
+ "learning_rate": 8.920686359991615e-07,
3221
+ "loss": 1.452,
3222
+ "step": 451
3223
+ },
3224
+ {
3225
+ "epoch": 1.6565760807246876,
3226
+ "grad_norm": 0.67578125,
3227
+ "learning_rate": 8.739515667422211e-07,
3228
+ "loss": 1.4608,
3229
+ "step": 452
3230
+ },
3231
+ {
3232
+ "epoch": 1.6602453847035892,
3233
+ "grad_norm": 0.73828125,
3234
+ "learning_rate": 8.560027270750276e-07,
3235
+ "loss": 1.4702,
3236
+ "step": 453
3237
+ },
3238
+ {
3239
+ "epoch": 1.6639146886824907,
3240
+ "grad_norm": 0.7109375,
3241
+ "learning_rate": 8.382228488178639e-07,
3242
+ "loss": 1.4405,
3243
+ "step": 454
3244
+ },
3245
+ {
3246
+ "epoch": 1.667583992661392,
3247
+ "grad_norm": 0.6796875,
3248
+ "learning_rate": 8.206126569020234e-07,
3249
+ "loss": 1.4379,
3250
+ "step": 455
3251
+ },
3252
+ {
3253
+ "epoch": 1.6712532966402935,
3254
+ "grad_norm": 0.7265625,
3255
+ "learning_rate": 8.031728693402502e-07,
3256
+ "loss": 1.4604,
3257
+ "step": 456
3258
+ },
3259
+ {
3260
+ "epoch": 1.674922600619195,
3261
+ "grad_norm": 0.71484375,
3262
+ "learning_rate": 7.859041971974668e-07,
3263
+ "loss": 1.4493,
3264
+ "step": 457
3265
+ },
3266
+ {
3267
+ "epoch": 1.6785919045980966,
3268
+ "grad_norm": 0.7109375,
3269
+ "learning_rate": 7.6880734456178e-07,
3270
+ "loss": 1.4538,
3271
+ "step": 458
3272
+ },
3273
+ {
3274
+ "epoch": 1.682261208576998,
3275
+ "grad_norm": 0.66015625,
3276
+ "learning_rate": 7.518830085157735e-07,
3277
+ "loss": 1.4081,
3278
+ "step": 459
3279
+ },
3280
+ {
3281
+ "epoch": 1.6859305125558994,
3282
+ "grad_norm": 0.69140625,
3283
+ "learning_rate": 7.351318791080881e-07,
3284
+ "loss": 1.4211,
3285
+ "step": 460
3286
+ },
3287
+ {
3288
+ "epoch": 1.689599816534801,
3289
+ "grad_norm": 0.70703125,
3290
+ "learning_rate": 7.185546393252835e-07,
3291
+ "loss": 1.4536,
3292
+ "step": 461
3293
+ },
3294
+ {
3295
+ "epoch": 1.6932691205137025,
3296
+ "grad_norm": 0.6953125,
3297
+ "learning_rate": 7.021519650639952e-07,
3298
+ "loss": 1.4332,
3299
+ "step": 462
3300
+ },
3301
+ {
3302
+ "epoch": 1.696938424492604,
3303
+ "grad_norm": 0.6953125,
3304
+ "learning_rate": 6.859245251033697e-07,
3305
+ "loss": 1.4219,
3306
+ "step": 463
3307
+ },
3308
+ {
3309
+ "epoch": 1.7006077284715055,
3310
+ "grad_norm": 0.671875,
3311
+ "learning_rate": 6.698729810778065e-07,
3312
+ "loss": 1.4141,
3313
+ "step": 464
3314
+ },
3315
+ {
3316
+ "epoch": 1.704277032450407,
3317
+ "grad_norm": 0.68359375,
3318
+ "learning_rate": 6.539979874499747e-07,
3319
+ "loss": 1.4131,
3320
+ "step": 465
3321
+ },
3322
+ {
3323
+ "epoch": 1.7079463364293086,
3324
+ "grad_norm": 0.69140625,
3325
+ "learning_rate": 6.383001914841252e-07,
3326
+ "loss": 1.423,
3327
+ "step": 466
3328
+ },
3329
+ {
3330
+ "epoch": 1.7116156404082101,
3331
+ "grad_norm": 0.68359375,
3332
+ "learning_rate": 6.227802332197125e-07,
3333
+ "loss": 1.4103,
3334
+ "step": 467
3335
+ },
3336
+ {
3337
+ "epoch": 1.7152849443871117,
3338
+ "grad_norm": 0.70703125,
3339
+ "learning_rate": 6.074387454452891e-07,
3340
+ "loss": 1.4348,
3341
+ "step": 468
3342
+ },
3343
+ {
3344
+ "epoch": 1.7189542483660132,
3345
+ "grad_norm": 0.69921875,
3346
+ "learning_rate": 5.922763536727023e-07,
3347
+ "loss": 1.4247,
3348
+ "step": 469
3349
+ },
3350
+ {
3351
+ "epoch": 1.7226235523449147,
3352
+ "grad_norm": 0.71875,
3353
+ "learning_rate": 5.772936761116027e-07,
3354
+ "loss": 1.4569,
3355
+ "step": 470
3356
+ },
3357
+ {
3358
+ "epoch": 1.7262928563238162,
3359
+ "grad_norm": 0.703125,
3360
+ "learning_rate": 5.624913236442287e-07,
3361
+ "loss": 1.5117,
3362
+ "step": 471
3363
+ },
3364
+ {
3365
+ "epoch": 1.7299621603027175,
3366
+ "grad_norm": 0.66015625,
3367
+ "learning_rate": 5.478698998004967e-07,
3368
+ "loss": 1.3924,
3369
+ "step": 472
3370
+ },
3371
+ {
3372
+ "epoch": 1.733631464281619,
3373
+ "grad_norm": 0.7421875,
3374
+ "learning_rate": 5.334300007334065e-07,
3375
+ "loss": 1.4718,
3376
+ "step": 473
3377
+ },
3378
+ {
3379
+ "epoch": 1.7373007682605206,
3380
+ "grad_norm": 0.6953125,
3381
+ "learning_rate": 5.191722151947227e-07,
3382
+ "loss": 1.4761,
3383
+ "step": 474
3384
+ },
3385
+ {
3386
+ "epoch": 1.7409700722394221,
3387
+ "grad_norm": 0.75,
3388
+ "learning_rate": 5.050971245109709e-07,
3389
+ "loss": 1.526,
3390
+ "step": 475
3391
+ },
3392
+ {
3393
+ "epoch": 1.7446393762183234,
3394
+ "grad_norm": 0.6640625,
3395
+ "learning_rate": 4.91205302559743e-07,
3396
+ "loss": 1.4963,
3397
+ "step": 476
3398
+ },
3399
+ {
3400
+ "epoch": 1.748308680197225,
3401
+ "grad_norm": 0.703125,
3402
+ "learning_rate": 4.77497315746292e-07,
3403
+ "loss": 1.5114,
3404
+ "step": 477
3405
+ },
3406
+ {
3407
+ "epoch": 1.7519779841761265,
3408
+ "grad_norm": 0.69921875,
3409
+ "learning_rate": 4.639737229804403e-07,
3410
+ "loss": 1.456,
3411
+ "step": 478
3412
+ },
3413
+ {
3414
+ "epoch": 1.755647288155028,
3415
+ "grad_norm": 0.734375,
3416
+ "learning_rate": 4.5063507565379195e-07,
3417
+ "loss": 1.4133,
3418
+ "step": 479
3419
+ },
3420
+ {
3421
+ "epoch": 1.7593165921339295,
3422
+ "grad_norm": 0.69921875,
3423
+ "learning_rate": 4.374819176172501e-07,
3424
+ "loss": 1.4294,
3425
+ "step": 480
3426
+ },
3427
+ {
3428
+ "epoch": 1.762985896112831,
3429
+ "grad_norm": 0.71484375,
3430
+ "learning_rate": 4.245147851588438e-07,
3431
+ "loss": 1.3787,
3432
+ "step": 481
3433
+ },
3434
+ {
3435
+ "epoch": 1.7666552000917326,
3436
+ "grad_norm": 0.671875,
3437
+ "learning_rate": 4.1173420698186027e-07,
3438
+ "loss": 1.4484,
3439
+ "step": 482
3440
+ },
3441
+ {
3442
+ "epoch": 1.7703245040706341,
3443
+ "grad_norm": 0.7265625,
3444
+ "learning_rate": 3.9914070418329123e-07,
3445
+ "loss": 1.4657,
3446
+ "step": 483
3447
+ },
3448
+ {
3449
+ "epoch": 1.7703245040706341,
3450
+ "eval_loss": 1.4023281335830688,
3451
+ "eval_runtime": 86.4904,
3452
+ "eval_samples_per_second": 8.961,
3453
+ "eval_steps_per_second": 4.486,
3454
+ "step": 483
3455
+ },
3456
+ {
3457
+ "epoch": 1.7739938080495357,
3458
+ "grad_norm": 0.66015625,
3459
+ "learning_rate": 3.8673479023258464e-07,
3460
+ "loss": 1.4335,
3461
+ "step": 484
3462
+ },
3463
+ {
3464
+ "epoch": 1.7776631120284372,
3465
+ "grad_norm": 0.6953125,
3466
+ "learning_rate": 3.7451697095070736e-07,
3467
+ "loss": 1.4129,
3468
+ "step": 485
3469
+ },
3470
+ {
3471
+ "epoch": 1.7813324160073387,
3472
+ "grad_norm": 0.66015625,
3473
+ "learning_rate": 3.6248774448952695e-07,
3474
+ "loss": 1.3978,
3475
+ "step": 486
3476
+ },
3477
+ {
3478
+ "epoch": 1.7850017199862402,
3479
+ "grad_norm": 0.83984375,
3480
+ "learning_rate": 3.506476013114946e-07,
3481
+ "loss": 1.4322,
3482
+ "step": 487
3483
+ },
3484
+ {
3485
+ "epoch": 1.7886710239651418,
3486
+ "grad_norm": 0.671875,
3487
+ "learning_rate": 3.3899702416965166e-07,
3488
+ "loss": 1.4127,
3489
+ "step": 488
3490
+ },
3491
+ {
3492
+ "epoch": 1.792340327944043,
3493
+ "grad_norm": 0.69921875,
3494
+ "learning_rate": 3.2753648808794505e-07,
3495
+ "loss": 1.418,
3496
+ "step": 489
3497
+ },
3498
+ {
3499
+ "epoch": 1.7960096319229446,
3500
+ "grad_norm": 0.67578125,
3501
+ "learning_rate": 3.1626646034186084e-07,
3502
+ "loss": 1.4601,
3503
+ "step": 490
3504
+ },
3505
+ {
3506
+ "epoch": 1.7996789359018461,
3507
+ "grad_norm": 0.66015625,
3508
+ "learning_rate": 3.05187400439369e-07,
3509
+ "loss": 1.4585,
3510
+ "step": 491
3511
+ },
3512
+ {
3513
+ "epoch": 1.8033482398807477,
3514
+ "grad_norm": 0.65625,
3515
+ "learning_rate": 2.942997601021924e-07,
3516
+ "loss": 1.4127,
3517
+ "step": 492
3518
+ },
3519
+ {
3520
+ "epoch": 1.807017543859649,
3521
+ "grad_norm": 0.66796875,
3522
+ "learning_rate": 2.8360398324738415e-07,
3523
+ "loss": 1.437,
3524
+ "step": 493
3525
+ },
3526
+ {
3527
+ "epoch": 1.8106868478385505,
3528
+ "grad_norm": 0.6875,
3529
+ "learning_rate": 2.7310050596923323e-07,
3530
+ "loss": 1.4759,
3531
+ "step": 494
3532
+ },
3533
+ {
3534
+ "epoch": 1.814356151817452,
3535
+ "grad_norm": 0.703125,
3536
+ "learning_rate": 2.6278975652147875e-07,
3537
+ "loss": 1.4243,
3538
+ "step": 495
3539
+ },
3540
+ {
3541
+ "epoch": 1.8180254557963536,
3542
+ "grad_norm": 0.6796875,
3543
+ "learning_rate": 2.5267215529985346e-07,
3544
+ "loss": 1.4361,
3545
+ "step": 496
3546
+ },
3547
+ {
3548
+ "epoch": 1.821694759775255,
3549
+ "grad_norm": 0.66796875,
3550
+ "learning_rate": 2.427481148249383e-07,
3551
+ "loss": 1.4993,
3552
+ "step": 497
3553
+ },
3554
+ {
3555
+ "epoch": 1.8253640637541566,
3556
+ "grad_norm": 0.6796875,
3557
+ "learning_rate": 2.330180397253473e-07,
3558
+ "loss": 1.4303,
3559
+ "step": 498
3560
+ },
3561
+ {
3562
+ "epoch": 1.8290333677330581,
3563
+ "grad_norm": 0.71875,
3564
+ "learning_rate": 2.2348232672122937e-07,
3565
+ "loss": 1.4701,
3566
+ "step": 499
3567
+ },
3568
+ {
3569
+ "epoch": 1.8327026717119597,
3570
+ "grad_norm": 0.70703125,
3571
+ "learning_rate": 2.141413646080881e-07,
3572
+ "loss": 1.4679,
3573
+ "step": 500
3574
+ },
3575
+ {
3576
+ "epoch": 1.8363719756908612,
3577
+ "grad_norm": 0.71484375,
3578
+ "learning_rate": 2.049955342409349e-07,
3579
+ "loss": 1.4148,
3580
+ "step": 501
3581
+ },
3582
+ {
3583
+ "epoch": 1.8400412796697627,
3584
+ "grad_norm": 0.6875,
3585
+ "learning_rate": 1.9604520851876196e-07,
3586
+ "loss": 1.4347,
3587
+ "step": 502
3588
+ },
3589
+ {
3590
+ "epoch": 1.8437105836486642,
3591
+ "grad_norm": 0.6875,
3592
+ "learning_rate": 1.8729075236932903e-07,
3593
+ "loss": 1.4295,
3594
+ "step": 503
3595
+ },
3596
+ {
3597
+ "epoch": 1.8473798876275658,
3598
+ "grad_norm": 0.734375,
3599
+ "learning_rate": 1.787325227342951e-07,
3600
+ "loss": 1.4682,
3601
+ "step": 504
3602
+ },
3603
+ {
3604
+ "epoch": 1.8510491916064673,
3605
+ "grad_norm": 0.69140625,
3606
+ "learning_rate": 1.7037086855465902e-07,
3607
+ "loss": 1.4757,
3608
+ "step": 505
3609
+ },
3610
+ {
3611
+ "epoch": 1.8547184955853686,
3612
+ "grad_norm": 0.6953125,
3613
+ "learning_rate": 1.6220613075653201e-07,
3614
+ "loss": 1.5042,
3615
+ "step": 506
3616
+ },
3617
+ {
3618
+ "epoch": 1.8583877995642701,
3619
+ "grad_norm": 0.69921875,
3620
+ "learning_rate": 1.542386422372405e-07,
3621
+ "loss": 1.4796,
3622
+ "step": 507
3623
+ },
3624
+ {
3625
+ "epoch": 1.8620571035431717,
3626
+ "grad_norm": 0.703125,
3627
+ "learning_rate": 1.4646872785175182e-07,
3628
+ "loss": 1.4894,
3629
+ "step": 508
3630
+ },
3631
+ {
3632
+ "epoch": 1.8657264075220732,
3633
+ "grad_norm": 0.71875,
3634
+ "learning_rate": 1.388967043994266e-07,
3635
+ "loss": 1.4937,
3636
+ "step": 509
3637
+ },
3638
+ {
3639
+ "epoch": 1.8693957115009745,
3640
+ "grad_norm": 0.71875,
3641
+ "learning_rate": 1.3152288061110518e-07,
3642
+ "loss": 1.4662,
3643
+ "step": 510
3644
+ },
3645
+ {
3646
+ "epoch": 1.873065015479876,
3647
+ "grad_norm": 0.66015625,
3648
+ "learning_rate": 1.243475571365177e-07,
3649
+ "loss": 1.4787,
3650
+ "step": 511
3651
+ },
3652
+ {
3653
+ "epoch": 1.8767343194587776,
3654
+ "grad_norm": 0.70703125,
3655
+ "learning_rate": 1.1737102653202825e-07,
3656
+ "loss": 1.4981,
3657
+ "step": 512
3658
+ },
3659
+ {
3660
+ "epoch": 1.880403623437679,
3661
+ "grad_norm": 0.65625,
3662
+ "learning_rate": 1.1059357324870456e-07,
3663
+ "loss": 1.4118,
3664
+ "step": 513
3665
+ },
3666
+ {
3667
+ "epoch": 1.8840729274165806,
3668
+ "grad_norm": 0.6953125,
3669
+ "learning_rate": 1.0401547362071939e-07,
3670
+ "loss": 1.4272,
3671
+ "step": 514
3672
+ },
3673
+ {
3674
+ "epoch": 1.8877422313954821,
3675
+ "grad_norm": 0.66015625,
3676
+ "learning_rate": 9.763699585408737e-08,
3677
+ "loss": 1.4569,
3678
+ "step": 515
3679
+ },
3680
+ {
3681
+ "epoch": 1.8914115353743837,
3682
+ "grad_norm": 0.75390625,
3683
+ "learning_rate": 9.145840001572537e-08,
3684
+ "loss": 1.4603,
3685
+ "step": 516
3686
+ },
3687
+ {
3688
+ "epoch": 1.8950808393532852,
3689
+ "grad_norm": 0.69921875,
3690
+ "learning_rate": 8.547993802285215e-08,
3691
+ "loss": 1.4544,
3692
+ "step": 517
3693
+ },
3694
+ {
3695
+ "epoch": 1.8987501433321867,
3696
+ "grad_norm": 0.65625,
3697
+ "learning_rate": 7.970185363271432e-08,
3698
+ "loss": 1.4413,
3699
+ "step": 518
3700
+ },
3701
+ {
3702
+ "epoch": 1.9024194473110883,
3703
+ "grad_norm": 0.7109375,
3704
+ "learning_rate": 7.41243824326504e-08,
3705
+ "loss": 1.464,
3706
+ "step": 519
3707
+ },
3708
+ {
3709
+ "epoch": 1.9060887512899898,
3710
+ "grad_norm": 0.69140625,
3711
+ "learning_rate": 6.8747751830483e-08,
3712
+ "loss": 1.4288,
3713
+ "step": 520
3714
+ },
3715
+ {
3716
+ "epoch": 1.9097580552688913,
3717
+ "grad_norm": 0.65234375,
3718
+ "learning_rate": 6.357218104524832e-08,
3719
+ "loss": 1.4477,
3720
+ "step": 521
3721
+ },
3722
+ {
3723
+ "epoch": 1.9134273592477928,
3724
+ "grad_norm": 0.73046875,
3725
+ "learning_rate": 5.8597881098257924e-08,
3726
+ "loss": 1.4375,
3727
+ "step": 522
3728
+ },
3729
+ {
3730
+ "epoch": 1.9170966632266941,
3731
+ "grad_norm": 0.7578125,
3732
+ "learning_rate": 5.382505480449274e-08,
3733
+ "loss": 1.477,
3734
+ "step": 523
3735
+ },
3736
+ {
3737
+ "epoch": 1.9207659672055957,
3738
+ "grad_norm": 0.68359375,
3739
+ "learning_rate": 4.925389676433745e-08,
3740
+ "loss": 1.4289,
3741
+ "step": 524
3742
+ },
3743
+ {
3744
+ "epoch": 1.9244352711844972,
3745
+ "grad_norm": 0.6328125,
3746
+ "learning_rate": 4.48845933556441e-08,
3747
+ "loss": 1.4622,
3748
+ "step": 525
3749
+ },
3750
+ {
3751
+ "epoch": 1.9281045751633987,
3752
+ "grad_norm": 0.671875,
3753
+ "learning_rate": 4.071732272613149e-08,
3754
+ "loss": 1.4519,
3755
+ "step": 526
3756
+ },
3757
+ {
3758
+ "epoch": 1.9317738791423,
3759
+ "grad_norm": 0.67578125,
3760
+ "learning_rate": 3.675225478612432e-08,
3761
+ "loss": 1.4413,
3762
+ "step": 527
3763
+ },
3764
+ {
3765
+ "epoch": 1.9354431831212016,
3766
+ "grad_norm": 0.88671875,
3767
+ "learning_rate": 3.2989551201624836e-08,
3768
+ "loss": 1.4628,
3769
+ "step": 528
3770
+ },
3771
+ {
3772
+ "epoch": 1.939112487100103,
3773
+ "grad_norm": 0.68359375,
3774
+ "learning_rate": 2.9429365387719232e-08,
3775
+ "loss": 1.4706,
3776
+ "step": 529
3777
+ },
3778
+ {
3779
+ "epoch": 1.9427817910790046,
3780
+ "grad_norm": 0.6796875,
3781
+ "learning_rate": 2.6071842502326526e-08,
3782
+ "loss": 1.4369,
3783
+ "step": 530
3784
+ },
3785
+ {
3786
+ "epoch": 1.9464510950579061,
3787
+ "grad_norm": 0.7734375,
3788
+ "learning_rate": 2.2917119440275524e-08,
3789
+ "loss": 1.4531,
3790
+ "step": 531
3791
+ },
3792
+ {
3793
+ "epoch": 1.9501203990368077,
3794
+ "grad_norm": 0.69140625,
3795
+ "learning_rate": 1.996532482772595e-08,
3796
+ "loss": 1.4549,
3797
+ "step": 532
3798
+ },
3799
+ {
3800
+ "epoch": 1.9537897030157092,
3801
+ "grad_norm": 0.65625,
3802
+ "learning_rate": 1.7216579016925415e-08,
3803
+ "loss": 1.4083,
3804
+ "step": 533
3805
+ },
3806
+ {
3807
+ "epoch": 1.9574590069946107,
3808
+ "grad_norm": 0.6796875,
3809
+ "learning_rate": 1.4670994081297796e-08,
3810
+ "loss": 1.4555,
3811
+ "step": 534
3812
+ },
3813
+ {
3814
+ "epoch": 1.9611283109735123,
3815
+ "grad_norm": 0.671875,
3816
+ "learning_rate": 1.2328673810877989e-08,
3817
+ "loss": 1.4682,
3818
+ "step": 535
3819
+ },
3820
+ {
3821
+ "epoch": 1.9647976149524138,
3822
+ "grad_norm": 0.6484375,
3823
+ "learning_rate": 1.0189713708078086e-08,
3824
+ "loss": 1.4423,
3825
+ "step": 536
3826
+ },
3827
+ {
3828
+ "epoch": 1.9684669189313153,
3829
+ "grad_norm": 0.73828125,
3830
+ "learning_rate": 8.254200983794369e-09,
3831
+ "loss": 1.4146,
3832
+ "step": 537
3833
+ },
3834
+ {
3835
+ "epoch": 1.9721362229102168,
3836
+ "grad_norm": 0.6875,
3837
+ "learning_rate": 6.5222145538501595e-09,
3838
+ "loss": 1.433,
3839
+ "step": 538
3840
+ },
3841
+ {
3842
+ "epoch": 1.9758055268891184,
3843
+ "grad_norm": 0.66015625,
3844
+ "learning_rate": 4.9938250357806085e-09,
3845
+ "loss": 1.4467,
3846
+ "step": 539
3847
+ },
3848
+ {
3849
+ "epoch": 1.9794748308680197,
3850
+ "grad_norm": 0.6875,
3851
+ "learning_rate": 3.669094745950008e-09,
3852
+ "loss": 1.4049,
3853
+ "step": 540
3854
+ },
3855
+ {
3856
+ "epoch": 1.9831441348469212,
3857
+ "grad_norm": 0.65234375,
3858
+ "learning_rate": 2.548077697014373e-09,
3859
+ "loss": 1.4835,
3860
+ "step": 541
3861
+ },
3862
+ {
3863
+ "epoch": 1.9868134388258227,
3864
+ "grad_norm": 0.671875,
3865
+ "learning_rate": 1.6308195957182028e-09,
3866
+ "loss": 1.4528,
3867
+ "step": 542
3868
+ },
3869
+ {
3870
+ "epoch": 1.9904827428047243,
3871
+ "grad_norm": 0.7109375,
3872
+ "learning_rate": 9.173578410281992e-10,
3873
+ "loss": 1.4519,
3874
+ "step": 543
3875
+ },
3876
+ {
3877
+ "epoch": 1.9941520467836256,
3878
+ "grad_norm": 0.671875,
3879
+ "learning_rate": 4.0772152261336906e-10,
3880
+ "loss": 1.4406,
3881
+ "step": 544
3882
+ },
3883
+ {
3884
+ "epoch": 1.997821350762527,
3885
+ "grad_norm": 0.7265625,
3886
+ "learning_rate": 1.0193141965486597e-10,
3887
+ "loss": 1.4137,
3888
+ "step": 545
3889
+ },
3890
+ {
3891
+ "epoch": 2.0014906547414286,
3892
+ "grad_norm": 0.703125,
3893
+ "learning_rate": 0.0,
3894
+ "loss": 1.4424,
3895
+ "step": 546
3896
+ }
3897
+ ],
3898
+ "logging_steps": 1,
3899
+ "max_steps": 546,
3900
+ "num_input_tokens_seen": 0,
3901
+ "num_train_epochs": 2,
3902
+ "save_steps": 137,
3903
+ "stateful_callbacks": {
3904
+ "TrainerControl": {
3905
+ "args": {
3906
+ "should_epoch_stop": false,
3907
+ "should_evaluate": false,
3908
+ "should_log": false,
3909
+ "should_save": true,
3910
+ "should_training_stop": true
3911
+ },
3912
+ "attributes": {}
3913
+ }
3914
+ },
3915
+ "total_flos": 1.2144265254892732e+19,
3916
+ "train_batch_size": 1,
3917
+ "trial_name": null,
3918
+ "trial_params": null
3919
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2629d03f8cfdaa9cd645a0ce9ebf1c262561700d6e7e7e0434fc1b65708b3c42
3
+ size 6136
vocab.json ADDED
The diff for this file is too large to render.
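
For convenience, a minimal Python sketch for inspecting the trainer state uploaded in this commit. It assumes the log above is the tail of a standard Hugging Face `trainer_state.json` whose entries live under the `log_history` key; the filename and that key are assumptions based on the usual Trainer format, not something shown explicitly in this diff.

```python
import json
import statistics

# Minimal sketch (assumption: the file is the standard HF Trainer
# "trainer_state.json" and its per-step entries live under "log_history",
# matching the log excerpt shown above).
with open("trainer_state.json") as f:
    state = json.load(f)

train_logs = [e for e in state["log_history"] if "loss" in e]       # per-step training entries
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]   # periodic eval entries

last = train_logs[-1]
print(f"logged train steps: {len(train_logs)} / max_steps={state['max_steps']}")
print(f"final train loss:   {last['loss']} (step {last['step']}, epoch {last['epoch']:.3f})")
print(f"mean train loss:    {statistics.mean(e['loss'] for e in train_logs):.4f}")
for e in eval_logs:
    print(f"eval_loss={e['eval_loss']:.4f} at step {e['step']}")
```

Run from a checkout containing the uploaded `trainer_state.json`; it prints the final and mean training loss plus each logged `eval_loss`, which in the excerpt above plateaus around 1.40.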