hzchng committed
Commit c34a7be
1 Parent(s): 55595d4

Saving train state of step 20

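For context, a train-state checkpoint with this layout (model.safetensors plus optimizer.bin under a checkpoint-<step>-epoch-<n>/ directory) is the kind of output 🤗 Accelerate's save_state produces. A minimal sketch of how such a checkpoint is typically written, assuming the training loop uses Accelerate; the tiny model, optimizer, and directory name below are illustrative stand-ins, not taken from this repository's training script:

```python
import torch
from accelerate import Accelerator

# Illustrative stand-ins; the real run trains a Whisper student model.
model = torch.nn.Linear(8, 8)
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4)

accelerator = Accelerator()
model, optimizer = accelerator.prepare(model, optimizer)

# ... training steps would run here ...
step, epoch = 20, 3

# save_state writes model.safetensors, optimizer.bin, plus scheduler and
# RNG state into the given directory -- the same file layout changed here.
accelerator.save_state(f"checkpoint-{step}-epoch-{epoch}")
```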
checkpoint-20-epoch-3/model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:aa6af6aaa725c126797fa2ede636e156f797aa228b518c733043223dfca3ccc2
+oid sha256:df43c6701a34b93f655952afb3197d0dc99ce26e2f794f425a65aceb823296c1
 size 1711916448
checkpoint-20-epoch-3/optimizer.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:67d5d452cbb8e8e5a3ae8e082a1d1d19b3d7a6b97e633b81585da81d66a4e461
+oid sha256:cd1f8b6539dd926fc9a78e0df2b502bbeb603a42705465370a0e2abd4117752f
 size 962406586
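Both weight files are stored with Git LFS, so the diff only changes the pointer (oid and size); per the LFS spec, the oid is the SHA-256 of the actual file content. A small sketch for checking a downloaded file against its pointer, assuming the file has already been pulled locally; the path is illustrative:

```python
import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk_size: int = 1 << 20) -> str:
    """Stream the file so a ~1.7 GB checkpoint never has to fit in memory."""
    digest = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

# Expected oid from the model.safetensors pointer after this commit.
expected_oid = "df43c6701a34b93f655952afb3197d0dc99ce26e2f794f425a65aceb823296c1"
local_file = Path("checkpoint-20-epoch-3/model.safetensors")  # illustrative local path

assert sha256_of(local_file) == expected_oid, "file does not match its LFS pointer"
```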
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "biodatlab/distill-whisper-th-medium",
+  "_name_or_path": "distil-whisper/training/distil-whisper-th-medium-4dec",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": true,
@@ -43,7 +43,7 @@
   "num_mel_bins": 80,
   "pad_token_id": 50257,
   "scale_embedding": false,
-  "torch_dtype": "float32",
+  "torch_dtype": "float16",
   "transformers_version": "4.38.1",
   "use_cache": true,
   "use_weighted_layer_sum": false,
distillation.log CHANGED
The diff for this file is too large to render. See raw diff