Training in progress, step 10, checkpoint
Files changed:
- checkpoint-10/adapter_config.json +1 -1
- checkpoint-10/adapter_model.safetensors +1 -1
- checkpoint-10/optimizer.pt +1 -1
- checkpoint-10/rng_state_0.pth +1 -1
- checkpoint-10/rng_state_1.pth +1 -1
- checkpoint-10/scheduler.pt +1 -1
- checkpoint-10/trainer_state.json +6 -6
- checkpoint-10/training_args.bin +1 -1
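These are the standard artifacts that transformers' Trainer writes when it saves a checkpoint: adapter weights, optimizer and scheduler state, per-process RNG state, the trainer state, and the serialized training arguments. As a minimal sketch, assuming a placeholder repo id (the repository name is not part of this commit), the checkpoint folder can be pulled from the Hub like this:

```python
# Minimal sketch: download only the checkpoint-10/ folder from the Hub.
# "your-org/your-training-repo" is a placeholder, not taken from this commit.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(
    repo_id="your-org/your-training-repo",   # placeholder repo id
    allow_patterns=["checkpoint-10/*"],      # just the files listed above
)
print(local_dir)
```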
checkpoint-10/adapter_config.json  CHANGED
@@ -19,9 +19,9 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
+    "k_proj",
     "q_proj",
     "o_proj",
-    "k_proj",
     "v_proj"
   ],
   "task_type": "CAUSAL_LM",
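The only change to the adapter config is that "k_proj" moved within target_modules: the same four attention projections (k_proj, q_proj, o_proj, v_proj) are targeted before and after, only the serialization order differs. For context, here is a minimal sketch of how such an adapter_config.json is typically produced with PEFT's LoraConfig; the rank, alpha, and dropout values are placeholders that are not visible in this diff:

```python
# Hedged sketch: producing an adapter_config.json with these target_modules
# via PEFT. r / lora_alpha / lora_dropout are placeholders (not in the diff).
from peft import LoraConfig

lora_config = LoraConfig(
    r=8,                       # placeholder rank
    lora_alpha=16,             # placeholder scaling factor
    lora_dropout=0.05,         # placeholder dropout
    target_modules=["k_proj", "q_proj", "o_proj", "v_proj"],  # as in the new config
    task_type="CAUSAL_LM",     # as in the config
)
lora_config.save_pretrained("checkpoint-10")  # writes checkpoint-10/adapter_config.json
```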
checkpoint-10/adapter_model.safetensors  CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fa53b3eb340a3cddab9910dc84cc4d10f325f5b7548580dd8d98e60db9bec895
 size 27297032
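The .safetensors, .pt, .pth, and .bin files are stored through Git LFS, so the repository only tracks a small pointer file (version, oid sha256, size) while the roughly 27 MB of adapter weights live in LFS storage. A small sketch, assuming the file has already been downloaded, of checking a local copy against the pointer shown above:

```python
# Sketch: verify a downloaded file against its Git LFS pointer (oid + size).
import hashlib
import os

path = "checkpoint-10/adapter_model.safetensors"
expected_oid = "fa53b3eb340a3cddab9910dc84cc4d10f325f5b7548580dd8d98e60db9bec895"
expected_size = 27297032

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match the LFS pointer"
assert digest.hexdigest() == expected_oid, "sha256 does not match the LFS pointer"
```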
checkpoint-10/optimizer.pt  CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:5b865af35c752678c7ceb1bb8351b318a080effc69c85aeaf6715088808c8502
 size 54678010
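optimizer.pt holds the optimizer state, roughly twice the adapter size here, which is consistent with Adam-style first and second moments kept for the trainable LoRA parameters. The usual way to reuse it is to pass resume_from_checkpoint to your Trainer; a hand-rolled sketch, where optimizer is a placeholder for your own optimizer instance:

```python
# Sketch: restoring optimizer state by hand when not using
# Trainer(...).train(resume_from_checkpoint="checkpoint-10").
import torch

optimizer_state = torch.load("checkpoint-10/optimizer.pt", map_location="cpu")
# 'optimizer' is a placeholder for the AdamW (or similar) instance built for
# the same trainable parameters:
# optimizer.load_state_dict(optimizer_state)
```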
checkpoint-10/rng_state_0.pth  CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cca2a297e7c44cdfd2b64bdcfd36aae2623a83c5c0d7be5dc2786f6037cfe92e
 size 14512
checkpoint-10/rng_state_1.pth  CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a374c072d59f9b1e72d0cbfc881a7ec98e593fce8d036754d6bd9e4901adc453
 size 14512
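There is one rng_state_{rank}.pth file per training process, so the two files here point to a two-process (for example, two-GPU) run. Conceptually each file is a snapshot of that process's random-number-generator states; a rough sketch follows, noting that the exact dictionary layout is a Trainer internal and may differ:

```python
# Rough sketch of a per-process RNG snapshot like rng_state_0.pth.
# The exact dictionary layout used by transformers' Trainer may differ.
import random
import numpy as np
import torch

rng_snapshot = {
    "python": random.getstate(),
    "numpy": np.random.get_state(),
    "cpu": torch.get_rng_state(),
    "cuda": torch.cuda.get_rng_state_all() if torch.cuda.is_available() else None,
}
torch.save(rng_snapshot, "rng_state_0.pth")
```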
checkpoint-10/scheduler.pt  CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1074e332977fc6228b32da9dea4b231ebabb408677fd6d3b87c8383547b8a43c
 size 1000
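scheduler.pt is the learning-rate scheduler's state_dict, which is why it weighs in at only 1000 bytes. A sketch of inspecting and restoring it, where scheduler is a placeholder for your own LR scheduler instance:

```python
# Sketch: inspect / restore the LR scheduler state saved at step 10.
import torch

sched_state = torch.load("checkpoint-10/scheduler.pt", map_location="cpu")
print(sched_state.get("last_epoch"), sched_state.get("_last_lr"))
# 'scheduler' is a placeholder for your LambdaLR / cosine schedule instance:
# scheduler.load_state_dict(sched_state)
```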
checkpoint-10/trainer_state.json  CHANGED
@@ -1,5 +1,5 @@
 {
-  "best_metric": 1.
+  "best_metric": 1.1490468978881836,
   "best_model_checkpoint": "./mistral/29-02-24-Weni-test-folder-upload_Zeroshot-2_max_steps-30_batch_8_2024-02-29_ppid_7/checkpoint-10",
   "epoch": 0.006199628022318661,
   "eval_steps": 10,
@@ -10,10 +10,10 @@
   "log_history": [
     {
       "epoch": 0.01,
-      "eval_loss": 1.
-      "eval_runtime":
-      "eval_samples_per_second": 13.
-      "eval_steps_per_second": 3.
+      "eval_loss": 1.1490468978881836,
+      "eval_runtime": 210.1155,
+      "eval_samples_per_second": 13.645,
+      "eval_steps_per_second": 3.412,
       "step": 10
     }
   ],
@@ -22,7 +22,7 @@
   "num_input_tokens_seen": 0,
   "num_train_epochs": 1,
   "save_steps": 10,
-  "total_flos":
+  "total_flos": 5248255688441856.0,
   "train_batch_size": 8,
   "trial_name": null,
   "trial_params": null
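trainer_state.json now records the first evaluation: at step 10 the eval loss is 1.149, which also becomes best_metric, and this checkpoint is marked as best_model_checkpoint. As a rough sanity check on the numbers, eval_samples_per_second times eval_runtime is about 13.645 x 210.1, or roughly 2,867 evaluation samples. A small sketch of reading these metrics back:

```python
# Sketch: read the step-10 eval metrics back out of trainer_state.json.
import json

with open("checkpoint-10/trainer_state.json") as f:
    state = json.load(f)

print("best_metric:", state["best_metric"])
for entry in state["log_history"]:
    if "eval_loss" in entry:
        print(entry["step"], entry["eval_loss"], entry["eval_runtime"])
```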
checkpoint-10/training_args.bin  CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:c185641aa778ebf322b648b5876f97d18fd7a50b2686547151fb1af1b5885465
 size 5112
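training_args.bin is the pickled TrainingArguments object that configured this run (the checkpoint path above suggests max_steps 30 and a batch size of 8). A sketch of inspecting it, assuming transformers is importable so the object can be unpickled:

```python
# Sketch: inspect the serialized TrainingArguments.
# weights_only=False is needed because this is a pickled Python object,
# so only load it from a source you trust.
import torch

args = torch.load("checkpoint-10/training_args.bin", weights_only=False)
print(args.per_device_train_batch_size, args.max_steps, args.save_steps)
```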