hzchng committed on
Commit 55595d4
1 Parent(s): a8740f1

Saving final weights of step 20

Files changed (3)
  1. config.json +2 -2
  2. distillation.log +5 -0
  3. model.safetensors +3 -0
config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "distil-whisper/training/distil-whisper-th-medium-4dec",
+ "_name_or_path": "biodatlab/distill-whisper-th-medium",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "apply_spec_augment": true,
@@ -43,7 +43,7 @@
  "num_mel_bins": 80,
  "pad_token_id": 50257,
  "scale_embedding": false,
- "torch_dtype": "float16",
+ "torch_dtype": "float32",
  "transformers_version": "4.38.1",
  "use_cache": true,
  "use_weighted_layer_sum": false,
distillation.log CHANGED
@@ -8566,3 +8566,8 @@ Processor WhisperProcessor:
 
 
 
+Some non-default generation parameters are set in the model config. These should go into a GenerationConfig file (https://huggingface.co/docs/transformers/generation_strategies#save-a-custom-decoding-strategy-with-your-model) instead. This warning will be raised to an exception in v4.41.
+Non-default generation parameters: {'max_length': 448, 'begin_suppress_tokens': [220, 50257]}
+Configuration saved in C:\Users\hzchng\Desktop\work\5-bjak\ai-voicebot\lib\whisper\output\config.json
+Configuration saved in C:\Users\hzchng\Desktop\work\5-bjak\ai-voicebot\lib\whisper\output\generation_config.json
+Model weights saved in C:\Users\hzchng\Desktop\work\5-bjak\ai-voicebot\lib\whisper\output\model.safetensors
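
The warning above asks for generation-time settings to live in generation_config.json rather than config.json, and the log shows that file was indeed written. Roughly how one would construct it by hand (a sketch, assuming the same `output` directory as the log paths):

```python
# Minimal sketch of what the warning asks for: move generation-time settings
# out of config.json into a dedicated generation_config.json.
from transformers import GenerationConfig

generation_config = GenerationConfig(
    max_length=448,
    begin_suppress_tokens=[220, 50257],
)
generation_config.save_pretrained("output")  # writes output/generation_config.json
```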
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa6af6aaa725c126797fa2ede636e156f797aa228b518c733043223dfca3ccc2
+ size 1711916448
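
The three lines above are a Git LFS pointer, not the weights themselves; the actual ~1.7 GB file is stored in LFS and fetched separately (e.g. via `git lfs pull`). A sketch of loading and verifying the real file once it has been pulled (the hash comparison against the recorded oid is an illustrative extra step, not something the commit itself does):

```python
# Assumes the full model.safetensors has been downloaded, not just the LFS pointer.
import hashlib
from safetensors.torch import load_file

state_dict = load_file("model.safetensors")  # maps tensor names -> torch.Tensor

# Optional integrity check against the sha256 oid recorded in the LFS pointer.
h = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print(h.hexdigest() == "aa6af6aaa725c126797fa2ede636e156f797aa228b518c733043223dfca3ccc2")
```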