Saving train state of step 20

Files changed:
- checkpoint-20-epoch-3/model.safetensors +1 -1
- checkpoint-20-epoch-3/optimizer.bin +1 -1
- config.json +2 -2
- distillation.log +0 -0
checkpoint-20-epoch-3/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:df43c6701a34b93f655952afb3197d0dc99ce26e2f794f425a65aceb823296c1
 size 1711916448
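Both binary files in this commit are tracked with Git LFS, so the repository stores only a three-line pointer per file (spec version, SHA-256 oid, byte size); the diff above shows that pointer being updated to reference the new weights blob. As a minimal sketch, a downloaded blob can be checked against the new pointer like this (the oid and size are copied from the diff above; the local path is an assumption about where the file lives):

# Sketch: verify the new model.safetensors blob against its LFS pointer.
# EXPECTED_OID and EXPECTED_SIZE are copied from the pointer diff above;
# the local path is an assumption.
import hashlib
from pathlib import Path

EXPECTED_OID = "df43c6701a34b93f655952afb3197d0dc99ce26e2f794f425a65aceb823296c1"
EXPECTED_SIZE = 1711916448

def verify_lfs_blob(path):
    blob = Path(path)
    if blob.stat().st_size != EXPECTED_SIZE:
        return False
    digest = hashlib.sha256()
    with blob.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == EXPECTED_OID

print(verify_lfs_blob("checkpoint-20-epoch-3/model.safetensors"))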
checkpoint-20-epoch-3/optimizer.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:cd1f8b6539dd926fc9a78e0df2b502bbeb603a42705465370a0e2abd4117752f
 size 962406586
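A checkpoint-<step>-epoch-<epoch> directory containing model.safetensors alongside optimizer.bin matches the layout written by Hugging Face Accelerate's save_state. Assuming this commit was produced by an Accelerate-based distillation loop (the training script itself is not part of this diff), the save/resume cycle looks roughly like this:

# Rough sketch of the save/resume cycle, assuming an Accelerate-based
# training loop; the tiny model and optimizer are stand-ins for the
# actual student model and its optimizer.
import torch
from accelerate import Accelerator

accelerator = Accelerator()
model = torch.nn.Linear(80, 80)  # stand-in for the student model
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4)
model, optimizer = accelerator.prepare(model, optimizer)

step, epoch = 20, 3
# Writes model.safetensors, optimizer.bin, RNG state, etc. into the directory.
accelerator.save_state(f"checkpoint-{step}-epoch-{epoch}")

# Later, to resume training from this commit's checkpoint:
accelerator.load_state(f"checkpoint-{step}-epoch-{epoch}")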
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "distil-whisper/training/distil-whisper-th-medium-4dec",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": true,
@@ -43,7 +43,7 @@
   "num_mel_bins": 80,
   "pad_token_id": 50257,
   "scale_embedding": false,
-  "torch_dtype": "
+  "torch_dtype": "float16",
   "transformers_version": "4.38.1",
   "use_cache": true,
   "use_weighted_layer_sum": false,
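The updated config points _name_or_path at the distil-whisper-th-medium-4dec run and pins torch_dtype to float16, and the Whisper-style keys (num_mel_bins, apply_spec_augment, pad_token_id 50257) imply a Whisper sequence-to-sequence model. As a sketch, such a checkpoint would be loaded at the recorded dtype like this (WhisperForConditionalGeneration and the local directory are assumptions; the loading code is not part of this diff):

# Sketch: load the checkpoint at the dtype recorded in config.json.
# Assumes the directory holds both config.json and model.safetensors;
# the model class is inferred from the Whisper-style config keys.
import torch
from transformers import WhisperForConditionalGeneration

model = WhisperForConditionalGeneration.from_pretrained(
    "checkpoint-20-epoch-3",
    torch_dtype=torch.float16,  # matches "torch_dtype": "float16" above
)
model.eval()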
distillation.log CHANGED
The diff for this file is too large to render.