bhattasp committed on
Commit
3e67a65
·
verified ·
1 Parent(s): f9051c6

End of training

Browse files
Files changed (2) hide show
  1. README.md +13 -13
  2. generation_config.json +2 -10
README.md CHANGED
@@ -1,4 +1,6 @@
1
  ---
 
 
2
  license: apache-2.0
3
  base_model: openai/whisper-tiny
4
  tags:
@@ -6,19 +8,19 @@ tags:
6
  metrics:
7
  - wer
8
  model-index:
9
- - name: whisper-finetuned-atco2_uwb
10
  results: []
11
  ---
12
 
13
  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
14
  should probably proofread and complete it, then remove this comment. -->
15
 
16
- # whisper-finetuned-atco2_uwb
17
 
18
- This model is a fine-tuned version of [openai/whisper-tiny](https://huggingface.co/openai/whisper-tiny) on the None dataset.
19
  It achieves the following results on the evaluation set:
20
- - Loss: 0.4056
21
- - Wer: 0.4963
22
 
23
  ## Model description
24
 
@@ -41,20 +43,18 @@ The following hyperparameters were used during training:
41
  - train_batch_size: 16
42
  - eval_batch_size: 8
43
  - seed: 42
44
- - gradient_accumulation_steps: 2
45
- - total_train_batch_size: 32
46
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
47
  - lr_scheduler_type: linear
48
- - num_epochs: 3
 
49
  - mixed_precision_training: Native AMP
50
 
51
  ### Training results
52
 
53
- | Training Loss | Epoch | Step | Validation Loss | Wer |
54
- |:-------------:|:-----:|:----:|:---------------:|:------:|
55
- | No log | 1.0 | 274 | 0.4760 | 0.4111 |
56
- | 0.6433 | 2.0 | 548 | 0.4017 | 0.3959 |
57
- | 0.6433 | 3.0 | 822 | 0.3874 | 0.4127 |
58
 
59
 
60
  ### Framework versions
 
1
  ---
2
+ language:
3
+ - en
4
  license: apache-2.0
5
  base_model: openai/whisper-tiny
6
  tags:
 
8
  metrics:
9
  - wer
10
  model-index:
11
+ - name: bhattasp/whisper-finetuned-atco2_uwb
12
  results: []
13
  ---
14
 
15
  <!-- This model card has been generated automatically according to the information the Trainer had access to. You
16
  should probably proofread and complete it, then remove this comment. -->
17
 
18
+ # bhattasp/whisper-finetuned-atco2_uwb
19
 
20
+ This model is a fine-tuned version of [openai/whisper-tiny](https://huggingface.co/openai/whisper-tiny) on the AtCO2_UWB dataset.
21
  It achieves the following results on the evaluation set:
22
+ - Loss: 0.6113
23
+ - Wer: 25.7983
24
 
25
  ## Model description
26
 
 
43
  - train_batch_size: 16
44
  - eval_batch_size: 8
45
  - seed: 42
 
 
46
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
47
  - lr_scheduler_type: linear
48
+ - lr_scheduler_warmup_steps: 500
49
+ - training_steps: 2000
50
  - mixed_precision_training: Native AMP
51
 
52
  ### Training results
53
 
54
+ | Training Loss | Epoch | Step | Validation Loss | Wer |
55
+ |:-------------:|:------:|:----:|:---------------:|:-------:|
56
+ | 0.5831 | 1.8248 | 1000 | 0.6804 | 29.0874 |
57
+ | 0.3487 | 3.6496 | 2000 | 0.6113 | 25.7983 |
 
58
 
59
 
60
  ### Framework versions
generation_config.json CHANGED
@@ -32,16 +32,6 @@
32
  "bos_token_id": 50257,
33
  "decoder_start_token_id": 50258,
34
  "eos_token_id": 50257,
35
- "forced_decoder_ids": [
36
- [
37
- 1,
38
- null
39
- ],
40
- [
41
- 2,
42
- 50359
43
- ]
44
- ],
45
  "is_multilingual": true,
46
  "lang_to_id": {
47
  "<|af|>": 50327,
@@ -144,6 +134,7 @@
144
  "<|yo|>": 50325,
145
  "<|zh|>": 50260
146
  },
 
147
  "max_initial_timestamp_index": 50,
148
  "max_length": 448,
149
  "no_timestamps_token_id": 50363,
@@ -240,6 +231,7 @@
240
  50361,
241
  50362
242
  ],
 
243
  "task_to_id": {
244
  "transcribe": 50359,
245
  "translate": 50358
 
32
  "bos_token_id": 50257,
33
  "decoder_start_token_id": 50258,
34
  "eos_token_id": 50257,
 
 
 
 
 
 
 
 
 
 
35
  "is_multilingual": true,
36
  "lang_to_id": {
37
  "<|af|>": 50327,
 
134
  "<|yo|>": 50325,
135
  "<|zh|>": 50260
136
  },
137
+ "language": "english",
138
  "max_initial_timestamp_index": 50,
139
  "max_length": 448,
140
  "no_timestamps_token_id": 50363,
 
231
  50361,
232
  50362
233
  ],
234
+ "task": "transcribe",
235
  "task_to_id": {
236
  "transcribe": 50359,
237
  "translate": 50358