diff --git "a/exp/log/log-train-2023-04-26-10-07-06-0" "b/exp/log/log-train-2023-04-26-10-07-06-0" new file mode 100644--- /dev/null +++ "b/exp/log/log-train-2023-04-26-10-07-06-0" @@ -0,0 +1,23767 @@ +2023-04-26 10:07:06,639 INFO [finetune.py:1046] (0/7) Training started +2023-04-26 10:07:06,641 INFO [finetune.py:1056] (0/7) Device: cuda:0 +2023-04-26 10:07:06,642 INFO [finetune.py:1065] (0/7) {'frame_shift_ms': 10.0, 'allowed_excess_duration_ratio': 0.1, 'best_train_loss': inf, 'best_valid_loss': inf, 'best_train_epoch': -1, 'best_valid_epoch': -1, 'batch_idx_train': 0, 'log_interval': 50, 'reset_interval': 200, 'valid_interval': 3000, 'feature_dim': 80, 'subsampling_factor': 4, 'warm_step': 2000, 'env_info': {'k2-version': '1.23.4', 'k2-build-type': 'Release', 'k2-with-cuda': True, 'k2-git-sha1': '62e404dd3f3a811d73e424199b3408e309c06e1a', 'k2-git-date': 'Mon Jan 30 02:26:16 2023', 'lhotse-version': '1.12.0.dev+git.3ccfeb7.clean', 'torch-version': '1.13.0', 'torch-cuda-available': True, 'torch-cuda-version': '11.7', 'python-version': '3.8', 'icefall-git-branch': 'master', 'icefall-git-sha1': 'd74822d-dirty', 'icefall-git-date': 'Tue Mar 21 21:35:32 2023', 'icefall-path': '/home/lishaojie/icefall', 'k2-path': '/home/lishaojie/.conda/envs/env_lishaojie/lib/python3.8/site-packages/k2/__init__.py', 'lhotse-path': '/home/lishaojie/.conda/envs/env_lishaojie/lib/python3.8/site-packages/lhotse/__init__.py', 'hostname': 'cnc533', 'IP address': '127.0.1.1'}, 'world_size': 7, 'master_port': 18181, 'tensorboard': True, 'num_epochs': 30, 'start_epoch': 1, 'start_batch': 0, 'exp_dir': PosixPath('pruned_transducer_stateless7_streaming/exp2'), 'bpe_model': 'data/lang_bpe_500/bpe.model', 'base_lr': 0.004, 'lr_batches': 100000.0, 'lr_epochs': 100.0, 'context_size': 2, 'prune_range': 5, 'lm_scale': 0.25, 'am_scale': 0.0, 'simple_loss_scale': 0.5, 'seed': 42, 'print_diagnostics': False, 'inf_check': False, 'save_every_n': 2000, 'keep_last_k': 30, 'average_period': 200, 'use_fp16': True, 'num_encoder_layers': '2,4,3,2,4', 'feedforward_dims': '1024,1024,2048,2048,1024', 'nhead': '8,8,8,8,8', 'encoder_dims': '384,384,384,384,384', 'attention_dims': '192,192,192,192,192', 'encoder_unmasked_dims': '256,256,256,256,256', 'zipformer_downsampling_factors': '1,2,4,8,2', 'cnn_module_kernels': '31,31,31,31,31', 'decoder_dim': 512, 'joiner_dim': 512, 'do_finetune': True, 'init_modules': 'encoder', 'finetune_ckpt': '/home/lishaojie/icefall/egs/commonvoice_fr/ASR/pruned_transducer_stateless7_streaming/exp/english_pretrain/pretrained.pt', 'manifest_dir': PosixPath('data/fbank'), 'max_duration': 200, 'bucketing_sampler': True, 'num_buckets': 30, 'concatenate_cuts': False, 'duration_factor': 1.0, 'gap': 1.0, 'on_the_fly_feats': False, 'shuffle': True, 'drop_last': True, 'return_cuts': True, 'num_workers': 2, 'enable_spec_aug': True, 'spec_aug_time_warp_factor': 80, 'enable_musan': True, 'input_strategy': 'PrecomputedFeatures', 'blank_id': 0, 'vocab_size': 500} +2023-04-26 10:07:06,642 INFO [finetune.py:1067] (0/7) About to create model +2023-04-26 10:07:07,015 INFO [zipformer.py:405] (0/7) At encoder stack 4, which has downsampling_factor=2, we will combine the outputs of layers 1 and 3, with downsampling_factors=2 and 8. 
+2023-04-26 10:07:07,025 INFO [finetune.py:1071] (0/7) Number of model parameters: 70369391 +2023-04-26 10:07:07,344 INFO [finetune.py:626] (0/7) Loading checkpoint from /home/lishaojie/icefall/egs/commonvoice_fr/ASR/pruned_transducer_stateless7_streaming/exp/english_pretrain/pretrained.pt +2023-04-26 10:07:07,479 INFO [finetune.py:647] (0/7) Loading parameters starting with prefix encoder +2023-04-26 10:07:08,673 INFO [finetune.py:1093] (0/7) Using DDP +2023-04-26 10:07:09,244 INFO [commonvoice_fr.py:392] (0/7) About to get train cuts +2023-04-26 10:07:09,247 INFO [commonvoice_fr.py:218] (0/7) Enable MUSAN +2023-04-26 10:07:09,247 INFO [commonvoice_fr.py:219] (0/7) About to get Musan cuts +2023-04-26 10:07:10,976 INFO [commonvoice_fr.py:243] (0/7) Enable SpecAugment +2023-04-26 10:07:10,976 INFO [commonvoice_fr.py:244] (0/7) Time warp factor: 80 +2023-04-26 10:07:10,976 INFO [commonvoice_fr.py:254] (0/7) Num frame mask: 10 +2023-04-26 10:07:10,976 INFO [commonvoice_fr.py:267] (0/7) About to create train dataset +2023-04-26 10:07:10,976 INFO [commonvoice_fr.py:294] (0/7) Using DynamicBucketingSampler. +2023-04-26 10:07:13,483 INFO [commonvoice_fr.py:309] (0/7) About to create train dataloader +2023-04-26 10:07:13,483 INFO [commonvoice_fr.py:399] (0/7) About to get dev cuts +2023-04-26 10:07:13,484 INFO [commonvoice_fr.py:340] (0/7) About to create dev dataset +2023-04-26 10:07:13,888 INFO [commonvoice_fr.py:357] (0/7) About to create dev dataloader +2023-04-26 10:07:13,888 INFO [finetune.py:1289] (0/7) Sanity check -- see if any of the batches in epoch 1 would cause OOM. +2023-04-26 10:11:06,929 INFO [finetune.py:1317] (0/7) Maximum memory allocated so far is 5191MB +2023-04-26 10:11:07,626 INFO [finetune.py:1317] (0/7) Maximum memory allocated so far is 5672MB +2023-04-26 10:11:08,314 INFO [finetune.py:1317] (0/7) Maximum memory allocated so far is 5672MB +2023-04-26 10:11:08,988 INFO [finetune.py:1317] (0/7) Maximum memory allocated so far is 5672MB +2023-04-26 10:11:09,670 INFO [finetune.py:1317] (0/7) Maximum memory allocated so far is 5672MB +2023-04-26 10:11:10,372 INFO [finetune.py:1317] (0/7) Maximum memory allocated so far is 5672MB +2023-04-26 10:11:20,025 INFO [finetune.py:976] (0/7) Epoch 1, batch 0, loss[loss=7.434, simple_loss=6.736, pruned_loss=6.961, over 4828.00 frames. ], tot_loss[loss=7.434, simple_loss=6.736, pruned_loss=6.961, over 4828.00 frames. ], batch size: 25, lr: 2.00e-03, grad_scale: 2.0 +2023-04-26 10:11:20,027 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 10:11:40,207 INFO [finetune.py:1010] (0/7) Epoch 1, validation: loss=7.31, simple_loss=6.623, pruned_loss=6.857, over 2265189.00 frames. +2023-04-26 10:11:40,208 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 5672MB +2023-04-26 10:11:48,672 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5.0, num_to_drop=2, layers_to_drop={1, 2} +2023-04-26 10:12:02,413 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=3.46 vs. limit=2.0 +2023-04-26 10:12:11,181 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=23.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:12:43,283 INFO [finetune.py:976] (0/7) Epoch 1, batch 50, loss[loss=2.628, simple_loss=2.502, pruned_loss=1.281, over 4893.00 frames. ], tot_loss[loss=4.464, simple_loss=4.04, pruned_loss=4.099, over 217796.55 frames. 
], batch size: 32, lr: 2.20e-03, grad_scale: 0.00390625 +2023-04-26 10:12:44,463 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=27.23 vs. limit=5.0 +2023-04-26 10:13:19,823 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:13:40,194 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=213.14 vs. limit=5.0 +2023-04-26 10:13:40,721 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=23.95 vs. limit=5.0 +2023-04-26 10:13:41,115 WARNING [finetune.py:966] (0/7) Grad scale is small: 6.103515625e-05 +2023-04-26 10:13:41,115 INFO [finetune.py:976] (0/7) Epoch 1, batch 100, loss[loss=2.321, simple_loss=2.183, pruned_loss=1.328, over 4862.00 frames. ], tot_loss[loss=3.633, simple_loss=3.359, pruned_loss=2.683, over 380436.79 frames. ], batch size: 34, lr: 2.40e-03, grad_scale: 0.0001220703125 +2023-04-26 10:14:03,496 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 4.683e+02 1.507e+03 7.833e+03 2.572e+04 3.214e+07, threshold=1.567e+04, percent-clipped=0.0 +2023-04-26 10:14:06,180 WARNING [optim.py:389] (0/7) Scaling gradients by 0.014711554162204266, model_norm_threshold=15666.9306640625 +2023-04-26 10:14:06,251 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.88, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.001e+12, grad_sumsq = 2.310e+12, orig_rms_sq=4.331e-01 +2023-04-26 10:14:23,410 WARNING [optim.py:389] (0/7) Scaling gradients by 0.00018281130178365856, model_norm_threshold=15666.9306640625 +2023-04-26 10:14:23,480 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.29, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.155e+15, grad_sumsq = 4.978e+15, orig_rms_sq=4.329e-01 +2023-04-26 10:14:23,586 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0947, 3.3018, 2.9616, 4.0076, 2.8463, 2.7770, 1.7425, 3.2767], + device='cuda:0'), covar=tensor([0.0103, 0.0078, 0.0075, 0.0030, 0.0107, 0.0096, 0.0111, 0.0086], + device='cuda:0'), in_proj_covar=tensor([0.0205, 0.0212, 0.0191, 0.0172, 0.0174, 0.0191, 0.0167, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 10:14:24,116 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144.0, num_to_drop=2, layers_to_drop={1, 3} +2023-04-26 10:14:27,645 INFO [finetune.py:976] (0/7) Epoch 1, batch 150, loss[loss=1.745, simple_loss=1.579, pruned_loss=1.342, over 4771.00 frames. ], tot_loss[loss=3.034, simple_loss=2.812, pruned_loss=2.123, over 508368.86 frames. 
], batch size: 28, lr: 2.60e-03, grad_scale: 3.0517578125e-05 +2023-04-26 10:14:28,149 WARNING [optim.py:389] (0/7) Scaling gradients by 0.00022292081848718226, model_norm_threshold=15666.9306640625 +2023-04-26 10:14:28,220 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.45, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=2.213e+15, grad_sumsq = 5.111e+15, orig_rms_sq=4.330e-01 +2023-04-26 10:14:40,638 WARNING [optim.py:389] (0/7) Scaling gradients by 0.05655747279524803, model_norm_threshold=15666.9306640625 +2023-04-26 10:14:40,708 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.84, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=6.482e+10, grad_sumsq = 1.497e+11, orig_rms_sq=4.330e-01 +2023-04-26 10:14:50,203 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=13.84 vs. limit=5.0 +2023-04-26 10:14:56,529 WARNING [finetune.py:966] (0/7) Grad scale is small: 3.0517578125e-05 +2023-04-26 10:14:56,530 INFO [finetune.py:976] (0/7) Epoch 1, batch 200, loss[loss=1.248, simple_loss=1.075, pruned_loss=1.189, over 4899.00 frames. ], tot_loss[loss=2.494, simple_loss=2.291, pruned_loss=1.821, over 608735.69 frames. ], batch size: 32, lr: 2.80e-03, grad_scale: 6.103515625e-05 +2023-04-26 10:15:07,788 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 6.387e+01 5.300e+02 1.840e+03 7.547e+03 8.570e+07, threshold=3.680e+03, percent-clipped=20.0 +2023-04-26 10:15:10,049 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=2.85 vs. limit=2.0 +2023-04-26 10:15:12,966 WARNING [optim.py:389] (0/7) Scaling gradients by 0.011872046627104282, model_norm_threshold=3679.54541015625 +2023-04-26 10:15:13,034 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.57, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.451e+10, grad_sumsq = 1.259e+11, orig_rms_sq=4.329e-01 +2023-04-26 10:15:15,301 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=19.42 vs. limit=5.0 +2023-04-26 10:15:16,162 WARNING [optim.py:389] (0/7) Scaling gradients by 0.08515117317438126, model_norm_threshold=3679.54541015625 +2023-04-26 10:15:16,230 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.79, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.483e+09, grad_sumsq = 3.425e+09, orig_rms_sq=4.329e-01 +2023-04-26 10:15:16,776 WARNING [optim.py:389] (0/7) Scaling gradients by 0.04552413150668144, model_norm_threshold=3679.54541015625 +2023-04-26 10:15:16,843 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.84, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=5.493e+09, grad_sumsq = 1.269e+10, orig_rms_sq=4.329e-01 +2023-04-26 10:15:19,138 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=2.15 vs. limit=2.0 +2023-04-26 10:15:20,630 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=21.77 vs. limit=5.0 +2023-04-26 10:15:25,654 INFO [finetune.py:976] (0/7) Epoch 1, batch 250, loss[loss=1.101, simple_loss=0.9331, pruned_loss=1.062, over 4736.00 frames. ], tot_loss[loss=2.136, simple_loss=1.937, pruned_loss=1.651, over 686243.73 frames. 
], batch size: 23, lr: 3.00e-03, grad_scale: 6.103515625e-05 +2023-04-26 10:15:50,809 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=296.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:15:52,837 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=300.0, num_to_drop=2, layers_to_drop={0, 2} +2023-04-26 10:15:53,282 WARNING [finetune.py:966] (0/7) Grad scale is small: 6.103515625e-05 +2023-04-26 10:15:53,283 INFO [finetune.py:976] (0/7) Epoch 1, batch 300, loss[loss=1.381, simple_loss=1.151, pruned_loss=1.346, over 4862.00 frames. ], tot_loss[loss=1.909, simple_loss=1.706, pruned_loss=1.548, over 747716.10 frames. ], batch size: 31, lr: 3.20e-03, grad_scale: 0.0001220703125 +2023-04-26 10:16:02,527 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 2.002e+01 7.343e+01 2.302e+02 1.070e+03 3.099e+05, threshold=4.604e+02, percent-clipped=16.0 +2023-04-26 10:16:14,415 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=29.77 vs. limit=5.0 +2023-04-26 10:16:21,499 INFO [finetune.py:976] (0/7) Epoch 1, batch 350, loss[loss=1.222, simple_loss=1.006, pruned_loss=1.18, over 4812.00 frames. ], tot_loss[loss=1.741, simple_loss=1.535, pruned_loss=1.463, over 791453.31 frames. ], batch size: 25, lr: 3.40e-03, grad_scale: 0.0001220703125 +2023-04-26 10:16:24,661 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=357.0, num_to_drop=2, layers_to_drop={1, 2} +2023-04-26 10:16:42,459 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=387.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:16:56,318 WARNING [finetune.py:966] (0/7) Grad scale is small: 0.0001220703125 +2023-04-26 10:16:56,318 INFO [finetune.py:976] (0/7) Epoch 1, batch 400, loss[loss=1.122, simple_loss=0.8942, pruned_loss=1.13, over 4772.00 frames. ], tot_loss[loss=1.623, simple_loss=1.408, pruned_loss=1.408, over 828458.37 frames. ], batch size: 25, lr: 3.60e-03, grad_scale: 0.000244140625 +2023-04-26 10:17:16,625 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.830e+01 2.642e+01 7.119e+01 5.229e+02 3.680e+03, threshold=1.424e+02, percent-clipped=26.0 +2023-04-26 10:17:27,882 WARNING [optim.py:389] (0/7) Scaling gradients by 0.02133115753531456, model_norm_threshold=142.37583923339844 +2023-04-26 10:17:27,958 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.81, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=3.619e+07, grad_sumsq = 8.362e+07, orig_rms_sq=4.328e-01 +2023-04-26 10:17:40,174 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=3.54 vs. limit=2.0 +2023-04-26 10:17:41,636 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=439.0, num_to_drop=2, layers_to_drop={0, 1} +2023-04-26 10:17:52,436 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=448.0, num_to_drop=2, layers_to_drop={0, 2} +2023-04-26 10:17:53,957 INFO [finetune.py:976] (0/7) Epoch 1, batch 450, loss[loss=1.186, simple_loss=0.9338, pruned_loss=1.181, over 4814.00 frames. ], tot_loss[loss=1.517, simple_loss=1.294, pruned_loss=1.356, over 856441.49 frames. ], batch size: 40, lr: 3.80e-03, grad_scale: 0.000244140625 +2023-04-26 10:17:54,654 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.66 vs. 
limit=2.0 +2023-04-26 10:18:14,586 WARNING [optim.py:389] (0/7) Scaling gradients by 0.06225070729851723, model_norm_threshold=142.37583923339844 +2023-04-26 10:18:14,660 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.68, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=3.535e+06, grad_sumsq = 8.167e+06, orig_rms_sq=4.328e-01 +2023-04-26 10:18:22,849 WARNING [finetune.py:966] (0/7) Grad scale is small: 0.000244140625 +2023-04-26 10:18:22,849 INFO [finetune.py:976] (0/7) Epoch 1, batch 500, loss[loss=1.239, simple_loss=0.9435, pruned_loss=1.271, over 4869.00 frames. ], tot_loss[loss=1.422, simple_loss=1.192, pruned_loss=1.302, over 876545.07 frames. ], batch size: 32, lr: 4.00e-03, grad_scale: 0.00048828125 +2023-04-26 10:18:28,289 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=13.36 vs. limit=5.0 +2023-04-26 10:18:31,737 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.709e+01 2.273e+01 3.115e+01 1.071e+02 6.675e+03, threshold=6.230e+01, percent-clipped=18.0 +2023-04-26 10:18:31,896 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=13.84 vs. limit=5.0 +2023-04-26 10:18:47,429 WARNING [optim.py:389] (0/7) Scaling gradients by 0.017591100186109543, model_norm_threshold=62.30100631713867 +2023-04-26 10:18:47,503 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.35, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.348e+06, grad_sumsq = 1.005e+07, orig_rms_sq=4.327e-01 +2023-04-26 10:18:56,302 WARNING [optim.py:389] (0/7) Scaling gradients by 0.005508477333933115, model_norm_threshold=62.30100631713867 +2023-04-26 10:18:56,374 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.80, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.024e+08, grad_sumsq = 2.367e+08, orig_rms_sq=4.327e-01 +2023-04-26 10:18:58,479 INFO [finetune.py:976] (0/7) Epoch 1, batch 550, loss[loss=1.1, simple_loss=0.8286, pruned_loss=1.108, over 4817.00 frames. ], tot_loss[loss=1.342, simple_loss=1.103, pruned_loss=1.256, over 896413.17 frames. ], batch size: 41, lr: 4.00e-03, grad_scale: 0.00048828125 +2023-04-26 10:19:04,277 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=562.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:19:13,096 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=568.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:19:35,153 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=590.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:19:45,891 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=600.0, num_to_drop=2, layers_to_drop={1, 2} +2023-04-26 10:19:46,331 WARNING [finetune.py:966] (0/7) Grad scale is small: 0.00048828125 +2023-04-26 10:19:46,331 INFO [finetune.py:976] (0/7) Epoch 1, batch 600, loss[loss=1.066, simple_loss=0.7911, pruned_loss=1.062, over 4768.00 frames. ], tot_loss[loss=1.288, simple_loss=1.038, pruned_loss=1.226, over 909590.78 frames. ], batch size: 26, lr: 4.00e-03, grad_scale: 0.0009765625 +2023-04-26 10:19:58,200 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=3.40 vs. limit=2.0 +2023-04-26 10:20:05,870 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. 
limit=2.0 +2023-04-26 10:20:06,710 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.706e+01 2.227e+01 2.629e+01 6.305e+01 1.131e+04, threshold=5.258e+01, percent-clipped=26.0 +2023-04-26 10:20:09,392 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=623.0, num_to_drop=2, layers_to_drop={0, 1} +2023-04-26 10:20:18,147 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=629.0, num_to_drop=2, layers_to_drop={1, 3} +2023-04-26 10:20:29,917 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=648.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:20:31,452 INFO [finetune.py:976] (0/7) Epoch 1, batch 650, loss[loss=1.128, simple_loss=0.8227, pruned_loss=1.118, over 4907.00 frames. ], tot_loss[loss=1.252, simple_loss=0.9899, pruned_loss=1.205, over 919877.90 frames. ], batch size: 43, lr: 4.00e-03, grad_scale: 0.0009765625 +2023-04-26 10:20:31,529 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=651.0, num_to_drop=2, layers_to_drop={0, 2} +2023-04-26 10:20:32,037 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=652.0, num_to_drop=2, layers_to_drop={1, 3} +2023-04-26 10:20:41,810 WARNING [optim.py:389] (0/7) Scaling gradients by 0.07653743773698807, model_norm_threshold=52.5806770324707 +2023-04-26 10:20:41,886 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.93, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=4.392e+05, grad_sumsq = 1.015e+06, orig_rms_sq=4.327e-01 +2023-04-26 10:20:59,048 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=79.15 vs. limit=5.0 +2023-04-26 10:20:59,956 WARNING [finetune.py:966] (0/7) Grad scale is small: 0.0009765625 +2023-04-26 10:20:59,956 INFO [finetune.py:976] (0/7) Epoch 1, batch 700, loss[loss=0.9933, simple_loss=0.7115, pruned_loss=0.9785, over 4796.00 frames. ], tot_loss[loss=1.219, simple_loss=0.9465, pruned_loss=1.179, over 927502.36 frames. ], batch size: 25, lr: 4.00e-03, grad_scale: 0.001953125 +2023-04-26 10:21:09,245 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.790e+01 2.165e+01 2.490e+01 3.193e+01 6.870e+02, threshold=4.979e+01, percent-clipped=6.0 +2023-04-26 10:21:20,038 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=739.0, num_to_drop=2, layers_to_drop={0, 1} +2023-04-26 10:21:22,068 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=743.0, num_to_drop=2, layers_to_drop={0, 1} +2023-04-26 10:21:28,227 INFO [finetune.py:976] (0/7) Epoch 1, batch 750, loss[loss=1.125, simple_loss=0.8082, pruned_loss=1.07, over 4783.00 frames. ], tot_loss[loss=1.191, simple_loss=0.9099, pruned_loss=1.152, over 935368.95 frames. ], batch size: 51, lr: 4.00e-03, grad_scale: 0.001953125 +2023-04-26 10:21:35,180 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=2.44 vs. 
limit=2.0 +2023-04-26 10:21:48,059 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=787.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:21:53,224 WARNING [optim.py:389] (0/7) Scaling gradients by 0.039711207151412964, model_norm_threshold=49.79251480102539 +2023-04-26 10:21:53,298 INFO [optim.py:451] (0/7) Parameter Dominanting tot_sumsq module.encoder.encoder_embed.conv.0.weight with proportion 0.81, where dominant_sumsq=(grad_sumsq*orig_rms_sq)=1.271e+06, grad_sumsq = 2.939e+06, orig_rms_sq=4.325e-01 +2023-04-26 10:21:55,913 WARNING [finetune.py:966] (0/7) Grad scale is small: 0.001953125 +2023-04-26 10:21:55,913 INFO [finetune.py:976] (0/7) Epoch 1, batch 800, loss[loss=1.071, simple_loss=0.7747, pruned_loss=0.9812, over 4750.00 frames. ], tot_loss[loss=1.161, simple_loss=0.8751, pruned_loss=1.118, over 939701.59 frames. ], batch size: 54, lr: 4.00e-03, grad_scale: 0.00390625 +2023-04-26 10:22:11,120 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 2.049e+01 2.360e+01 2.808e+01 3.468e+01 1.254e+03, threshold=5.615e+01, percent-clipped=6.0 +2023-04-26 10:22:26,848 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=847.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:22:26,871 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.4573, 2.2014, 2.5964, 2.8061, 2.3058, 2.4088, 2.0869, 2.3848], + device='cuda:0'), covar=tensor([0.0407, 0.1029, 0.0566, 0.0495, 0.0675, 0.0393, 0.0586, 0.0417], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0503, 0.0405, 0.0394, 0.0438, 0.0443, 0.0492, 0.0428], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 10:22:28,870 INFO [finetune.py:976] (0/7) Epoch 1, batch 850, loss[loss=0.9537, simple_loss=0.6736, pruned_loss=0.8768, over 4749.00 frames. ], tot_loss[loss=1.129, simple_loss=0.8416, pruned_loss=1.077, over 943383.04 frames. ], batch size: 27, lr: 4.00e-03, grad_scale: 0.00390625 +2023-04-26 10:22:37,630 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7356, 1.7256, 1.7115, 2.3154, 1.2438, 3.5443, 4.0645, 2.3631], + device='cuda:0'), covar=tensor([0.0534, 0.0724, 0.0962, 0.0645, 0.0668, 0.0202, 0.0281, 0.0504], + device='cuda:0'), in_proj_covar=tensor([0.0508, 0.0578, 0.0651, 0.0630, 0.0560, 0.0614, 0.0646, 0.0636], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 10:23:15,920 WARNING [finetune.py:966] (0/7) Grad scale is small: 0.00390625 +2023-04-26 10:23:15,920 INFO [finetune.py:976] (0/7) Epoch 1, batch 900, loss[loss=0.9541, simple_loss=0.6818, pruned_loss=0.8425, over 4771.00 frames. ], tot_loss[loss=1.095, simple_loss=0.8088, pruned_loss=1.033, over 945719.11 frames. 
], batch size: 28, lr: 4.00e-03, grad_scale: 0.0078125 +2023-04-26 10:23:20,200 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=908.0, num_to_drop=2, layers_to_drop={2, 3} +2023-04-26 10:23:26,896 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 2.132e+01 2.534e+01 2.883e+01 3.456e+01 6.931e+01, threshold=5.766e+01, percent-clipped=4.0 +2023-04-26 10:23:26,975 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=918.0, num_to_drop=2, layers_to_drop={0, 2} +2023-04-26 10:23:30,108 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=924.0, num_to_drop=2, layers_to_drop={1, 2} +2023-04-26 10:23:38,116 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=2.16 vs. limit=2.0 +2023-04-26 10:23:42,106 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=946.0, num_to_drop=2, layers_to_drop={2, 3} +2023-04-26 10:23:44,693 INFO [finetune.py:976] (0/7) Epoch 1, batch 950, loss[loss=1.028, simple_loss=0.7325, pruned_loss=0.8903, over 4906.00 frames. ], tot_loss[loss=1.071, simple_loss=0.785, pruned_loss=0.9949, over 948978.69 frames. ], batch size: 32, lr: 4.00e-03, grad_scale: 0.0078125 +2023-04-26 10:23:45,274 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=952.0, num_to_drop=2, layers_to_drop={0, 2} +2023-04-26 10:24:20,179 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=41.34 vs. limit=5.0 +2023-04-26 10:24:41,987 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=1000.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:24:42,450 WARNING [finetune.py:966] (0/7) Grad scale is small: 0.0078125 +2023-04-26 10:24:42,451 INFO [finetune.py:976] (0/7) Epoch 1, batch 1000, loss[loss=1.033, simple_loss=0.7314, pruned_loss=0.8797, over 4894.00 frames. ], tot_loss[loss=1.062, simple_loss=0.7738, pruned_loss=0.9699, over 950248.18 frames. ], batch size: 32, lr: 4.00e-03, grad_scale: 0.015625 +2023-04-26 10:25:00,172 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 2.486e+01 3.090e+01 3.594e+01 4.276e+01 7.170e+01, threshold=7.188e+01, percent-clipped=7.0 +2023-04-26 10:25:05,642 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=18.60 vs. limit=5.0 +2023-04-26 10:25:14,556 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1043.0, num_to_drop=2, layers_to_drop={0, 1} +2023-04-26 10:25:18,748 INFO [finetune.py:976] (0/7) Epoch 1, batch 1050, loss[loss=1.09, simple_loss=0.7791, pruned_loss=0.8991, over 4906.00 frames. ], tot_loss[loss=1.061, simple_loss=0.7691, pruned_loss=0.952, over 950945.76 frames. 
], batch size: 43, lr: 4.00e-03, grad_scale: 0.015625 +2023-04-26 10:25:22,588 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0430, 2.1410, 1.3255, 1.8142, 1.9049, 1.3620, 2.6741, 1.1399], + device='cuda:0'), covar=tensor([0.0665, 0.0454, 0.0694, 0.0631, 0.0479, 0.0484, 0.0393, 0.0724], + device='cuda:0'), in_proj_covar=tensor([0.0383, 0.0395, 0.0476, 0.0410, 0.0457, 0.0429, 0.0447, 0.0458], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 10:25:42,476 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=1091.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:25:48,293 INFO [finetune.py:976] (0/7) Epoch 1, batch 1100, loss[loss=0.9162, simple_loss=0.6441, pruned_loss=0.7548, over 4805.00 frames. ], tot_loss[loss=1.056, simple_loss=0.7609, pruned_loss=0.9301, over 953436.12 frames. ], batch size: 25, lr: 4.00e-03, grad_scale: 0.03125 +2023-04-26 10:25:57,277 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 2.855e+01 3.563e+01 4.845e+01 6.131e+01 1.271e+02, threshold=9.690e+01, percent-clipped=11.0 +2023-04-26 10:26:16,868 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=9.54 vs. limit=5.0 +2023-04-26 10:26:17,721 INFO [finetune.py:976] (0/7) Epoch 1, batch 1150, loss[loss=0.8933, simple_loss=0.6195, pruned_loss=0.7316, over 4776.00 frames. ], tot_loss[loss=1.043, simple_loss=0.7486, pruned_loss=0.9041, over 952515.19 frames. ], batch size: 26, lr: 4.00e-03, grad_scale: 0.03125 +2023-04-26 10:26:18,901 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1153.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:26:46,980 INFO [finetune.py:976] (0/7) Epoch 1, batch 1200, loss[loss=0.8912, simple_loss=0.6286, pruned_loss=0.7048, over 4750.00 frames. ], tot_loss[loss=1.032, simple_loss=0.7372, pruned_loss=0.8797, over 953810.22 frames. ], batch size: 54, lr: 4.00e-03, grad_scale: 0.0625 +2023-04-26 10:26:48,589 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1203.0, num_to_drop=2, layers_to_drop={2, 3} +2023-04-26 10:26:54,317 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1214.0, num_to_drop=2, layers_to_drop={1, 3} +2023-04-26 10:26:56,334 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 2.920e+01 3.405e+01 3.864e+01 4.670e+01 1.171e+02, threshold=7.728e+01, percent-clipped=2.0 +2023-04-26 10:26:56,423 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1218.0, num_to_drop=2, layers_to_drop={0, 1} +2023-04-26 10:26:59,572 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1224.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 10:27:08,443 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=2.41 vs. limit=2.0 +2023-04-26 10:27:19,078 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1246.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 10:27:21,700 INFO [finetune.py:976] (0/7) Epoch 1, batch 1250, loss[loss=1.014, simple_loss=0.713, pruned_loss=0.7906, over 4809.00 frames. ], tot_loss[loss=1.017, simple_loss=0.7235, pruned_loss=0.8538, over 954494.61 frames. 
], batch size: 39, lr: 4.00e-03, grad_scale: 0.0625 +2023-04-26 10:27:40,173 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=1266.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:27:49,523 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=1272.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:27:51,797 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=13.56 vs. limit=5.0 +2023-04-26 10:28:15,539 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=1294.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:28:24,920 INFO [finetune.py:976] (0/7) Epoch 1, batch 1300, loss[loss=1.029, simple_loss=0.7157, pruned_loss=0.7971, over 4867.00 frames. ], tot_loss[loss=1.002, simple_loss=0.7095, pruned_loss=0.8282, over 955039.50 frames. ], batch size: 34, lr: 4.00e-03, grad_scale: 0.125 +2023-04-26 10:28:40,245 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 3.024e+01 3.776e+01 4.391e+01 5.836e+01 1.199e+02, threshold=8.782e+01, percent-clipped=8.0 +2023-04-26 10:29:00,692 INFO [finetune.py:976] (0/7) Epoch 1, batch 1350, loss[loss=0.8781, simple_loss=0.6011, pruned_loss=0.6781, over 4050.00 frames. ], tot_loss[loss=1.001, simple_loss=0.7047, pruned_loss=0.8155, over 955247.62 frames. ], batch size: 17, lr: 4.00e-03, grad_scale: 0.125 +2023-04-26 10:29:09,501 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=2.54 vs. limit=2.0 +2023-04-26 10:29:21,025 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5589, 1.6983, 1.4623, 1.1259, 1.2432, 1.2529, 1.4288, 1.2460], + device='cuda:0'), covar=tensor([0.1343, 0.1598, 0.1395, 0.2281, 0.2100, 0.1930, 0.0951, 0.1629], + device='cuda:0'), in_proj_covar=tensor([0.0224, 0.0255, 0.0225, 0.0248, 0.0263, 0.0220, 0.0213, 0.0236], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003, 0.0004], + device='cuda:0') +2023-04-26 10:29:52,993 INFO [finetune.py:976] (0/7) Epoch 1, batch 1400, loss[loss=1.086, simple_loss=0.7471, pruned_loss=0.8227, over 4898.00 frames. ], tot_loss[loss=1.012, simple_loss=0.7086, pruned_loss=0.8126, over 954903.42 frames. ], batch size: 36, lr: 4.00e-03, grad_scale: 0.25 +2023-04-26 10:30:14,312 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 3.571e+01 4.693e+01 6.414e+01 7.905e+01 1.778e+02, threshold=1.283e+02, percent-clipped=17.0 +2023-04-26 10:30:34,176 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1434.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:30:55,950 INFO [finetune.py:976] (0/7) Epoch 1, batch 1450, loss[loss=0.8673, simple_loss=0.592, pruned_loss=0.6509, over 4188.00 frames. ], tot_loss[loss=1.019, simple_loss=0.7102, pruned_loss=0.8067, over 954915.56 frames. ], batch size: 18, lr: 4.00e-03, grad_scale: 0.25 +2023-04-26 10:31:42,620 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1495.0, num_to_drop=2, layers_to_drop={0, 1} +2023-04-26 10:31:53,098 INFO [finetune.py:976] (0/7) Epoch 1, batch 1500, loss[loss=0.8353, simple_loss=0.5694, pruned_loss=0.6183, over 4286.00 frames. ], tot_loss[loss=1.019, simple_loss=0.7082, pruned_loss=0.7955, over 954076.62 frames. 
], batch size: 19, lr: 4.00e-03, grad_scale: 0.5 +2023-04-26 10:31:59,748 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1503.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 10:32:03,094 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1509.0, num_to_drop=2, layers_to_drop={2, 3} +2023-04-26 10:32:13,076 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 3.937e+01 5.249e+01 6.518e+01 8.001e+01 1.317e+02, threshold=1.304e+02, percent-clipped=3.0 +2023-04-26 10:32:48,628 INFO [finetune.py:976] (0/7) Epoch 1, batch 1550, loss[loss=0.9143, simple_loss=0.635, pruned_loss=0.6585, over 4842.00 frames. ], tot_loss[loss=1.012, simple_loss=0.703, pruned_loss=0.778, over 956409.60 frames. ], batch size: 44, lr: 4.00e-03, grad_scale: 0.5 +2023-04-26 10:32:48,701 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=1551.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:33:08,373 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1566.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:33:31,753 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3320, 0.9609, 1.0254, 1.2632, 0.9455, 0.9106, 0.8066, 0.8357], + device='cuda:0'), covar=tensor([5.2850, 5.3915, 3.6486, 7.2738, 8.6413, 5.5724, 6.8960, 6.1384], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0319, 0.0253, 0.0414, 0.0276, 0.0264, 0.0314, 0.0260], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0003, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 10:33:50,408 INFO [finetune.py:976] (0/7) Epoch 1, batch 1600, loss[loss=0.8754, simple_loss=0.6272, pruned_loss=0.6085, over 4912.00 frames. ], tot_loss[loss=0.9907, simple_loss=0.6901, pruned_loss=0.749, over 956753.67 frames. ], batch size: 43, lr: 4.00e-03, grad_scale: 1.0 +2023-04-26 10:34:04,217 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1611.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:34:04,239 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9296, 1.3148, 1.8505, 2.2287, 1.4935, 1.0065, 1.5517, 0.8917], + device='cuda:0'), covar=tensor([0.0412, 0.0487, 0.0736, 0.0289, 0.0499, 0.1414, 0.0549, 0.1737], + device='cuda:0'), in_proj_covar=tensor([0.0080, 0.0090, 0.0083, 0.0088, 0.0104, 0.0104, 0.0102, 0.0089], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-26 10:34:13,529 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 5.628e+01 9.315e+01 1.324e+02 1.713e+02 3.757e+02, threshold=2.648e+02, percent-clipped=49.0 +2023-04-26 10:34:24,070 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1627.0, num_to_drop=2, layers_to_drop={1, 3} +2023-04-26 10:34:48,569 INFO [finetune.py:976] (0/7) Epoch 1, batch 1650, loss[loss=0.8363, simple_loss=0.6022, pruned_loss=0.5726, over 4893.00 frames. ], tot_loss[loss=0.9591, simple_loss=0.672, pruned_loss=0.7128, over 956223.41 frames. ], batch size: 35, lr: 4.00e-03, grad_scale: 1.0 +2023-04-26 10:35:08,304 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1672.0, num_to_drop=2, layers_to_drop={0, 1} +2023-04-26 10:35:36,514 INFO [finetune.py:976] (0/7) Epoch 1, batch 1700, loss[loss=0.6967, simple_loss=0.5239, pruned_loss=0.4571, over 4822.00 frames. ], tot_loss[loss=0.9246, simple_loss=0.6538, pruned_loss=0.6744, over 955547.72 frames. 
], batch size: 25, lr: 4.00e-03, grad_scale: 1.0 +2023-04-26 10:35:59,306 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.556e+01 1.723e+02 2.061e+02 2.467e+02 4.417e+02, threshold=4.123e+02, percent-clipped=15.0 +2023-04-26 10:36:05,035 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.99 vs. limit=5.0 +2023-04-26 10:36:14,431 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9683, 3.7918, 2.8163, 4.4549, 3.8112, 3.8685, 1.6300, 3.7246], + device='cuda:0'), covar=tensor([0.1774, 0.1430, 0.3812, 0.1526, 0.3297, 0.1844, 0.6656, 0.2356], + device='cuda:0'), in_proj_covar=tensor([0.0264, 0.0239, 0.0292, 0.0332, 0.0330, 0.0272, 0.0290, 0.0287], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0003, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 10:36:24,059 INFO [finetune.py:976] (0/7) Epoch 1, batch 1750, loss[loss=0.8133, simple_loss=0.6229, pruned_loss=0.5215, over 4811.00 frames. ], tot_loss[loss=0.9016, simple_loss=0.6456, pruned_loss=0.6436, over 956827.92 frames. ], batch size: 39, lr: 4.00e-03, grad_scale: 1.0 +2023-04-26 10:36:31,963 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0669, 2.0268, 1.7538, 1.6841, 2.0839, 2.0659, 2.5243, 1.3380], + device='cuda:0'), covar=tensor([0.1775, 0.1004, 0.1549, 0.1274, 0.0926, 0.1112, 0.0563, 0.2147], + device='cuda:0'), in_proj_covar=tensor([0.0330, 0.0344, 0.0414, 0.0355, 0.0399, 0.0372, 0.0386, 0.0400], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 10:36:58,573 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1790.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:37:11,178 INFO [finetune.py:976] (0/7) Epoch 1, batch 1800, loss[loss=0.7397, simple_loss=0.5772, pruned_loss=0.4639, over 4884.00 frames. ], tot_loss[loss=0.8757, simple_loss=0.6372, pruned_loss=0.6111, over 957853.63 frames. ], batch size: 32, lr: 4.00e-03, grad_scale: 1.0 +2023-04-26 10:37:21,899 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=1809.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 10:37:28,278 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.499e+02 2.323e+02 2.803e+02 3.399e+02 5.478e+02, threshold=5.607e+02, percent-clipped=9.0 +2023-04-26 10:37:29,504 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1032, 1.5799, 1.2965, 1.4310, 1.4127, 1.5074, 1.3665, 2.9954], + device='cuda:0'), covar=tensor([0.1021, 0.1091, 0.1260, 0.2517, 0.1318, 0.0909, 0.1192, 0.0273], + device='cuda:0'), in_proj_covar=tensor([0.0041, 0.0045, 0.0045, 0.0051, 0.0046, 0.0043, 0.0045, 0.0070], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0014, 0.0014, 0.0016, 0.0015, 0.0013, 0.0014, 0.0019], + device='cuda:0') +2023-04-26 10:37:40,484 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1838.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:37:47,697 INFO [finetune.py:976] (0/7) Epoch 1, batch 1850, loss[loss=0.5235, simple_loss=0.4278, pruned_loss=0.3147, over 4760.00 frames. ], tot_loss[loss=0.8408, simple_loss=0.6223, pruned_loss=0.5739, over 957772.35 frames. 
], batch size: 23, lr: 4.00e-03, grad_scale: 1.0 +2023-04-26 10:37:51,630 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=1857.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:37:51,658 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1857.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:37:59,985 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=1870.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:38:17,756 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1899.0, num_to_drop=2, layers_to_drop={0, 3} +2023-04-26 10:38:18,739 INFO [finetune.py:976] (0/7) Epoch 1, batch 1900, loss[loss=0.6602, simple_loss=0.5447, pruned_loss=0.3918, over 4846.00 frames. ], tot_loss[loss=0.8059, simple_loss=0.6069, pruned_loss=0.5382, over 957980.87 frames. ], batch size: 49, lr: 4.00e-03, grad_scale: 1.0 +2023-04-26 10:38:28,761 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.629e+02 2.504e+02 3.040e+02 3.671e+02 6.110e+02, threshold=6.080e+02, percent-clipped=2.0 +2023-04-26 10:38:28,885 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1918.0, num_to_drop=2, layers_to_drop={1, 3} +2023-04-26 10:38:32,034 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1922.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 10:38:34,735 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.31 vs. limit=5.0 +2023-04-26 10:38:39,016 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=1931.0, num_to_drop=2, layers_to_drop={2, 3} +2023-04-26 10:38:50,101 INFO [finetune.py:976] (0/7) Epoch 1, batch 1950, loss[loss=0.6078, simple_loss=0.5068, pruned_loss=0.356, over 4905.00 frames. ], tot_loss[loss=0.766, simple_loss=0.5866, pruned_loss=0.5011, over 958651.39 frames. ], batch size: 37, lr: 4.00e-03, grad_scale: 1.0 +2023-04-26 10:38:59,728 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=1967.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:39:13,979 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8368, 1.4829, 1.6797, 2.1589, 2.4175, 1.6767, 1.2601, 1.8513], + device='cuda:0'), covar=tensor([0.0871, 0.1605, 0.1045, 0.0635, 0.0453, 0.0914, 0.1173, 0.0813], + device='cuda:0'), in_proj_covar=tensor([0.0201, 0.0213, 0.0191, 0.0169, 0.0170, 0.0187, 0.0164, 0.0182], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 10:39:20,822 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-2000.pt +2023-04-26 10:39:22,528 INFO [finetune.py:976] (0/7) Epoch 1, batch 2000, loss[loss=0.5239, simple_loss=0.4472, pruned_loss=0.3003, over 4864.00 frames. ], tot_loss[loss=0.7271, simple_loss=0.566, pruned_loss=0.4664, over 958895.07 frames. 
], batch size: 31, lr: 4.00e-03, grad_scale: 2.0 +2023-04-26 10:39:23,216 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2477, 1.8406, 1.5633, 1.6730, 1.7287, 2.1667, 1.7696, 3.9892], + device='cuda:0'), covar=tensor([0.1190, 0.1076, 0.1309, 0.2596, 0.1311, 0.0903, 0.1160, 0.0137], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0044, 0.0044, 0.0050, 0.0045, 0.0042, 0.0044, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0014, 0.0016, 0.0014, 0.0013, 0.0014, 0.0018], + device='cuda:0') +2023-04-26 10:39:39,765 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.592e+02 2.794e+02 3.334e+02 3.918e+02 6.535e+02, threshold=6.668e+02, percent-clipped=3.0 +2023-04-26 10:39:51,759 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.4287, 3.3595, 2.6116, 3.8459, 3.2970, 3.4062, 1.6055, 3.2805], + device='cuda:0'), covar=tensor([0.1500, 0.1117, 0.2969, 0.1737, 0.2464, 0.1600, 0.4686, 0.2032], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0235, 0.0285, 0.0325, 0.0324, 0.0268, 0.0282, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 10:40:01,844 INFO [finetune.py:976] (0/7) Epoch 1, batch 2050, loss[loss=0.523, simple_loss=0.4609, pruned_loss=0.2926, over 4826.00 frames. ], tot_loss[loss=0.6847, simple_loss=0.5429, pruned_loss=0.4306, over 956592.39 frames. ], batch size: 33, lr: 4.00e-03, grad_scale: 2.0 +2023-04-26 10:40:06,057 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4123, 1.9213, 1.3909, 1.8126, 1.3245, 1.3647, 1.7962, 1.3144], + device='cuda:0'), covar=tensor([0.1745, 0.0964, 0.1260, 0.1200, 0.2724, 0.1410, 0.1450, 0.1904], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0290, 0.0213, 0.0270, 0.0287, 0.0247, 0.0249, 0.0259], + device='cuda:0'), out_proj_covar=tensor([1.1312e-04, 1.1855e-04, 8.7136e-05, 1.0950e-04, 1.1933e-04, 9.9719e-05, + 1.0335e-04, 1.0540e-04], device='cuda:0') +2023-04-26 10:40:09,630 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-26 10:40:37,624 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2090.0, num_to_drop=2, layers_to_drop={0, 1} +2023-04-26 10:40:49,295 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.22 vs. limit=5.0 +2023-04-26 10:40:50,242 INFO [finetune.py:976] (0/7) Epoch 1, batch 2100, loss[loss=0.6237, simple_loss=0.5186, pruned_loss=0.3644, over 4115.00 frames. ], tot_loss[loss=0.6528, simple_loss=0.5276, pruned_loss=0.4025, over 956254.92 frames. ], batch size: 65, lr: 4.00e-03, grad_scale: 2.0 +2023-04-26 10:40:52,207 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.44 vs. limit=5.0 +2023-04-26 10:41:11,498 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.614e+02 2.534e+02 2.914e+02 3.246e+02 6.149e+02, threshold=5.827e+02, percent-clipped=0.0 +2023-04-26 10:41:31,945 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=2138.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:41:34,256 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-26 10:41:41,044 INFO [finetune.py:976] (0/7) Epoch 1, batch 2150, loss[loss=0.6029, simple_loss=0.5327, pruned_loss=0.3366, over 4912.00 frames. ], tot_loss[loss=0.6343, simple_loss=0.5226, pruned_loss=0.3835, over 955858.96 frames. 
], batch size: 36, lr: 4.00e-03, grad_scale: 4.0 +2023-04-26 10:42:25,848 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2194.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:42:28,741 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0774, 1.8337, 1.4710, 1.5910, 1.5373, 1.7764, 1.7230, 3.6911], + device='cuda:0'), covar=tensor([0.1003, 0.0876, 0.0943, 0.1774, 0.0938, 0.0809, 0.0870, 0.0134], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0041, 0.0042, 0.0047, 0.0043, 0.0040, 0.0042, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 10:42:30,521 INFO [finetune.py:976] (0/7) Epoch 1, batch 2200, loss[loss=0.4996, simple_loss=0.4607, pruned_loss=0.2692, over 4925.00 frames. ], tot_loss[loss=0.6123, simple_loss=0.5141, pruned_loss=0.3634, over 955984.97 frames. ], batch size: 38, lr: 4.00e-03, grad_scale: 4.0 +2023-04-26 10:42:36,060 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-26 10:42:37,622 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2213.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:42:40,428 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.685e+02 2.565e+02 3.033e+02 3.519e+02 5.393e+02, threshold=6.065e+02, percent-clipped=0.0 +2023-04-26 10:42:42,822 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2222.0, num_to_drop=2, layers_to_drop={0, 2} +2023-04-26 10:42:45,138 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2226.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:43:02,641 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-26 10:43:02,982 INFO [finetune.py:976] (0/7) Epoch 1, batch 2250, loss[loss=0.5719, simple_loss=0.513, pruned_loss=0.3154, over 4799.00 frames. ], tot_loss[loss=0.5908, simple_loss=0.504, pruned_loss=0.3452, over 955068.40 frames. ], batch size: 51, lr: 4.00e-03, grad_scale: 4.0 +2023-04-26 10:43:10,735 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=9.19 vs. limit=5.0 +2023-04-26 10:43:12,374 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2267.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 10:43:14,134 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=2270.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:43:57,382 INFO [finetune.py:976] (0/7) Epoch 1, batch 2300, loss[loss=0.4412, simple_loss=0.4219, pruned_loss=0.2302, over 4838.00 frames. ], tot_loss[loss=0.5697, simple_loss=0.4935, pruned_loss=0.3279, over 954962.86 frames. ], batch size: 49, lr: 4.00e-03, grad_scale: 4.0 +2023-04-26 10:44:17,314 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=2315.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:44:19,006 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.752e+02 2.423e+02 2.781e+02 3.392e+02 7.688e+02, threshold=5.562e+02, percent-clipped=1.0 +2023-04-26 10:44:43,391 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2340.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:44:51,821 INFO [finetune.py:976] (0/7) Epoch 1, batch 2350, loss[loss=0.4198, simple_loss=0.3927, pruned_loss=0.2234, over 4834.00 frames. 
], tot_loss[loss=0.5446, simple_loss=0.4788, pruned_loss=0.3091, over 955495.06 frames. ], batch size: 30, lr: 4.00e-03, grad_scale: 4.0 +2023-04-26 10:45:40,786 INFO [finetune.py:976] (0/7) Epoch 1, batch 2400, loss[loss=0.411, simple_loss=0.3894, pruned_loss=0.2163, over 4691.00 frames. ], tot_loss[loss=0.5259, simple_loss=0.468, pruned_loss=0.2949, over 953942.32 frames. ], batch size: 23, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:45:40,913 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2401.0, num_to_drop=2, layers_to_drop={1, 3} +2023-04-26 10:45:51,690 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.659e+02 2.375e+02 2.859e+02 3.332e+02 6.118e+02, threshold=5.719e+02, percent-clipped=1.0 +2023-04-26 10:46:11,802 INFO [finetune.py:976] (0/7) Epoch 1, batch 2450, loss[loss=0.5165, simple_loss=0.4571, pruned_loss=0.2879, over 4846.00 frames. ], tot_loss[loss=0.5092, simple_loss=0.4581, pruned_loss=0.2825, over 954491.44 frames. ], batch size: 47, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:46:19,890 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-26 10:46:38,533 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.2849, 4.0473, 3.0937, 4.8325, 4.0865, 4.2174, 1.9707, 4.0565], + device='cuda:0'), covar=tensor([0.1150, 0.0868, 0.3418, 0.0819, 0.2247, 0.1307, 0.4606, 0.1897], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0233, 0.0284, 0.0326, 0.0320, 0.0268, 0.0281, 0.0281], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 10:46:39,127 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2494.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 10:46:44,083 INFO [finetune.py:976] (0/7) Epoch 1, batch 2500, loss[loss=0.4726, simple_loss=0.4468, pruned_loss=0.2492, over 4820.00 frames. ], tot_loss[loss=0.5017, simple_loss=0.4558, pruned_loss=0.2756, over 956330.74 frames. ], batch size: 51, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:46:52,644 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2513.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:46:56,028 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.542e+02 2.485e+02 2.798e+02 3.305e+02 6.030e+02, threshold=5.597e+02, percent-clipped=1.0 +2023-04-26 10:47:00,940 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2526.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:47:10,436 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=2542.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:47:16,162 INFO [finetune.py:976] (0/7) Epoch 1, batch 2550, loss[loss=0.4862, simple_loss=0.4778, pruned_loss=0.2473, over 4924.00 frames. ], tot_loss[loss=0.4942, simple_loss=0.4542, pruned_loss=0.2685, over 956091.01 frames. ], batch size: 42, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:47:34,851 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=2561.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:47:38,370 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. 
limit=2.0 +2023-04-26 10:47:50,039 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=2574.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:47:53,707 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-26 10:48:06,448 INFO [finetune.py:976] (0/7) Epoch 1, batch 2600, loss[loss=0.3279, simple_loss=0.3325, pruned_loss=0.1616, over 4103.00 frames. ], tot_loss[loss=0.4868, simple_loss=0.4514, pruned_loss=0.2621, over 953759.10 frames. ], batch size: 18, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:48:09,491 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8652, 0.8520, 1.0197, 1.2432, 1.0512, 0.8297, 0.8949, 0.8599], + device='cuda:0'), covar=tensor([25.4061, 38.1329, 39.5479, 24.2016, 30.9894, 50.9104, 59.9381, 33.5468], + device='cuda:0'), in_proj_covar=tensor([0.0470, 0.0533, 0.0603, 0.0576, 0.0510, 0.0576, 0.0591, 0.0589], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 10:48:18,462 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.395e+02 2.465e+02 2.854e+02 3.439e+02 6.010e+02, threshold=5.707e+02, percent-clipped=1.0 +2023-04-26 10:48:27,820 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7291, 3.6687, 2.6102, 4.1728, 3.5887, 3.7239, 1.5805, 3.5757], + device='cuda:0'), covar=tensor([0.1384, 0.0928, 0.3143, 0.1566, 0.2445, 0.1481, 0.4925, 0.1952], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0234, 0.0285, 0.0326, 0.0321, 0.0269, 0.0283, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 10:48:29,139 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. limit=2.0 +2023-04-26 10:48:38,251 INFO [finetune.py:976] (0/7) Epoch 1, batch 2650, loss[loss=0.4702, simple_loss=0.458, pruned_loss=0.2412, over 4777.00 frames. ], tot_loss[loss=0.478, simple_loss=0.4472, pruned_loss=0.2553, over 952485.89 frames. 
], batch size: 29, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:48:40,004 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6023, 2.6368, 1.3946, 1.5801, 1.1584, 1.1582, 1.4314, 1.0150], + device='cuda:0'), covar=tensor([0.4806, 0.4180, 0.6641, 0.8099, 0.6760, 0.6343, 0.5497, 0.6657], + device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0232, 0.0212, 0.0228, 0.0247, 0.0207, 0.0203, 0.0221], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 10:48:55,252 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0528, 1.6305, 1.5154, 1.7205, 1.6158, 1.6207, 1.6474, 3.1383], + device='cuda:0'), covar=tensor([0.0830, 0.0786, 0.0858, 0.1499, 0.0763, 0.0615, 0.0784, 0.0166], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0041, 0.0041, 0.0046, 0.0042, 0.0040, 0.0041, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0011, 0.0012, 0.0013, 0.0015, 0.0013, 0.0012, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 10:49:01,798 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2685.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:49:14,012 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=2696.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:49:16,818 INFO [finetune.py:976] (0/7) Epoch 1, batch 2700, loss[loss=0.4388, simple_loss=0.4194, pruned_loss=0.2291, over 4766.00 frames. ], tot_loss[loss=0.4629, simple_loss=0.438, pruned_loss=0.2445, over 951219.07 frames. ], batch size: 59, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:49:39,494 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.764e+02 2.585e+02 2.984e+02 3.485e+02 4.746e+02, threshold=5.968e+02, percent-clipped=0.0 +2023-04-26 10:50:04,154 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1065, 2.1084, 1.8427, 1.8968, 2.3025, 1.7687, 2.7868, 1.6530], + device='cuda:0'), covar=tensor([0.2902, 0.0972, 0.1978, 0.1672, 0.1054, 0.1702, 0.0581, 0.2283], + device='cuda:0'), in_proj_covar=tensor([0.0298, 0.0308, 0.0373, 0.0319, 0.0356, 0.0333, 0.0344, 0.0358], + device='cuda:0'), out_proj_covar=tensor([9.2290e-05, 9.5058e-05, 1.1541e-04, 9.9741e-05, 1.0929e-04, 1.0177e-04, + 1.0385e-04, 1.1121e-04], device='cuda:0') +2023-04-26 10:50:04,161 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2746.0, num_to_drop=2, layers_to_drop={0, 3} +2023-04-26 10:50:13,030 INFO [finetune.py:976] (0/7) Epoch 1, batch 2750, loss[loss=0.4454, simple_loss=0.4186, pruned_loss=0.2361, over 4835.00 frames. ], tot_loss[loss=0.4523, simple_loss=0.4307, pruned_loss=0.2374, over 952759.31 frames. ], batch size: 33, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:50:13,193 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=9.27 vs. limit=5.0 +2023-04-26 10:51:06,574 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-26 10:51:16,387 INFO [finetune.py:976] (0/7) Epoch 1, batch 2800, loss[loss=0.3471, simple_loss=0.3508, pruned_loss=0.1717, over 4809.00 frames. ], tot_loss[loss=0.4397, simple_loss=0.4218, pruned_loss=0.2293, over 953987.30 frames. 
], batch size: 25, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:51:38,603 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.584e+02 2.321e+02 2.765e+02 3.400e+02 8.366e+02, threshold=5.531e+02, percent-clipped=2.0 +2023-04-26 10:51:49,426 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7291, 2.1822, 1.5658, 2.1322, 1.6836, 1.5170, 1.9756, 1.3702], + device='cuda:0'), covar=tensor([0.2016, 0.1788, 0.1623, 0.1555, 0.3043, 0.1700, 0.1778, 0.2969], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0306, 0.0226, 0.0286, 0.0299, 0.0261, 0.0261, 0.0275], + device='cuda:0'), out_proj_covar=tensor([1.1955e-04, 1.2530e-04, 9.2857e-05, 1.1567e-04, 1.2412e-04, 1.0568e-04, + 1.0823e-04, 1.1227e-04], device='cuda:0') +2023-04-26 10:51:49,433 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2824.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:52:24,239 INFO [finetune.py:976] (0/7) Epoch 1, batch 2850, loss[loss=0.3971, simple_loss=0.4016, pruned_loss=0.1963, over 4835.00 frames. ], tot_loss[loss=0.4313, simple_loss=0.4166, pruned_loss=0.2233, over 953880.70 frames. ], batch size: 33, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:52:24,418 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.20 vs. limit=5.0 +2023-04-26 10:52:36,628 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2272, 1.2734, 5.3721, 5.0004, 4.7155, 5.0647, 4.8065, 4.7026], + device='cuda:0'), covar=tensor([0.5864, 0.6603, 0.0861, 0.1582, 0.1290, 0.1009, 0.0999, 0.1462], + device='cuda:0'), in_proj_covar=tensor([0.0347, 0.0320, 0.0459, 0.0469, 0.0386, 0.0440, 0.0353, 0.0409], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 10:53:07,931 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2885.0, num_to_drop=2, layers_to_drop={1, 3} +2023-04-26 10:53:29,244 INFO [finetune.py:976] (0/7) Epoch 1, batch 2900, loss[loss=0.3401, simple_loss=0.3439, pruned_loss=0.1681, over 3859.00 frames. ], tot_loss[loss=0.4294, simple_loss=0.4173, pruned_loss=0.221, over 954373.50 frames. ], batch size: 16, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:53:41,218 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.1863, 4.1113, 2.9560, 4.6505, 4.0160, 4.1263, 1.7081, 3.9370], + device='cuda:0'), covar=tensor([0.1379, 0.0832, 0.2476, 0.1082, 0.3176, 0.1464, 0.4868, 0.1906], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0232, 0.0283, 0.0326, 0.0320, 0.0270, 0.0283, 0.0283], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 10:53:46,016 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.613e+02 2.467e+02 2.930e+02 3.474e+02 6.951e+02, threshold=5.860e+02, percent-clipped=1.0 +2023-04-26 10:53:59,790 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=2937.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:54:08,443 INFO [finetune.py:976] (0/7) Epoch 1, batch 2950, loss[loss=0.3974, simple_loss=0.406, pruned_loss=0.1944, over 4898.00 frames. ], tot_loss[loss=0.4258, simple_loss=0.4172, pruned_loss=0.2173, over 953193.40 frames. ], batch size: 43, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:54:16,934 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=7.72 vs. 
limit=5.0 +2023-04-26 10:54:23,855 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2443, 1.8174, 1.6017, 1.9060, 1.7785, 1.9955, 1.7729, 3.6915], + device='cuda:0'), covar=tensor([0.0765, 0.0714, 0.0812, 0.1366, 0.0715, 0.0579, 0.0701, 0.0176], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0040, 0.0041, 0.0046, 0.0041, 0.0040, 0.0040, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0011, 0.0012, 0.0013, 0.0015, 0.0013, 0.0012, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 10:54:37,954 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=2996.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 10:54:39,129 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=2998.0, num_to_drop=2, layers_to_drop={0, 2} +2023-04-26 10:54:40,892 INFO [finetune.py:976] (0/7) Epoch 1, batch 3000, loss[loss=0.4489, simple_loss=0.4484, pruned_loss=0.2247, over 4855.00 frames. ], tot_loss[loss=0.4213, simple_loss=0.4147, pruned_loss=0.2142, over 952093.30 frames. ], batch size: 44, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:54:40,893 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 10:54:46,814 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2554, 1.0257, 1.6272, 1.5469, 1.1096, 0.9167, 1.1401, 0.7420], + device='cuda:0'), covar=tensor([0.1884, 0.1463, 0.0716, 0.0865, 0.2040, 0.2630, 0.1503, 0.1927], + device='cuda:0'), in_proj_covar=tensor([0.0078, 0.0086, 0.0080, 0.0083, 0.0100, 0.0102, 0.0100, 0.0086], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0004, 0.0003, 0.0004, 0.0004, 0.0004, 0.0004, 0.0004], + device='cuda:0') +2023-04-26 10:54:46,985 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3991, 3.4041, 2.6919, 3.8210, 3.4195, 3.3586, 1.5589, 3.3362], + device='cuda:0'), covar=tensor([0.1278, 0.1070, 0.2610, 0.1717, 0.2172, 0.1543, 0.4596, 0.1789], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0232, 0.0283, 0.0326, 0.0320, 0.0271, 0.0283, 0.0283], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 10:54:51,383 INFO [finetune.py:1010] (0/7) Epoch 1, validation: loss=0.4217, simple_loss=0.4614, pruned_loss=0.191, over 2265189.00 frames. +2023-04-26 10:54:51,383 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 5691MB +2023-04-26 10:54:56,299 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6521, 1.4621, 1.7479, 1.9249, 1.9442, 1.4522, 1.0796, 1.7148], + device='cuda:0'), covar=tensor([0.1196, 0.1414, 0.0859, 0.0900, 0.0802, 0.1319, 0.1485, 0.0859], + device='cuda:0'), in_proj_covar=tensor([0.0203, 0.0209, 0.0189, 0.0173, 0.0172, 0.0189, 0.0167, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 10:55:01,572 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.562e+02 2.517e+02 2.904e+02 3.818e+02 1.122e+03, threshold=5.808e+02, percent-clipped=2.0 +2023-04-26 10:55:02,264 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3019.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:55:08,317 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. 
limit=2.0 +2023-04-26 10:55:16,253 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3041.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:55:18,041 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=3044.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:55:23,632 INFO [finetune.py:976] (0/7) Epoch 1, batch 3050, loss[loss=0.4435, simple_loss=0.4306, pruned_loss=0.2282, over 4166.00 frames. ], tot_loss[loss=0.4185, simple_loss=0.4146, pruned_loss=0.2113, over 952309.83 frames. ], batch size: 65, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:55:37,569 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-26 10:55:42,251 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3080.0, num_to_drop=2, layers_to_drop={1, 2} +2023-04-26 10:55:56,456 INFO [finetune.py:976] (0/7) Epoch 1, batch 3100, loss[loss=0.4689, simple_loss=0.4443, pruned_loss=0.2467, over 4899.00 frames. ], tot_loss[loss=0.4125, simple_loss=0.4102, pruned_loss=0.2075, over 951866.94 frames. ], batch size: 36, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:56:12,962 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.494e+02 2.339e+02 2.765e+02 3.275e+02 7.045e+02, threshold=5.531e+02, percent-clipped=2.0 +2023-04-26 10:56:55,554 INFO [finetune.py:976] (0/7) Epoch 1, batch 3150, loss[loss=0.3589, simple_loss=0.3804, pruned_loss=0.1687, over 4785.00 frames. ], tot_loss[loss=0.4026, simple_loss=0.4028, pruned_loss=0.2013, over 953847.85 frames. ], batch size: 26, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:57:32,974 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3180.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:57:46,511 INFO [finetune.py:976] (0/7) Epoch 1, batch 3200, loss[loss=0.4239, simple_loss=0.4289, pruned_loss=0.2095, over 4841.00 frames. ], tot_loss[loss=0.3933, simple_loss=0.3955, pruned_loss=0.1956, over 953034.92 frames. ], batch size: 47, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:58:16,459 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.736e+02 2.453e+02 2.807e+02 3.222e+02 7.994e+02, threshold=5.615e+02, percent-clipped=2.0 +2023-04-26 10:58:50,760 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5093, 1.1944, 4.0682, 3.7748, 3.5767, 3.7827, 3.8157, 3.5724], + device='cuda:0'), covar=tensor([0.6604, 0.5933, 0.1056, 0.1717, 0.1235, 0.2067, 0.1666, 0.1457], + device='cuda:0'), in_proj_covar=tensor([0.0347, 0.0321, 0.0460, 0.0470, 0.0388, 0.0442, 0.0352, 0.0412], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 10:58:52,918 INFO [finetune.py:976] (0/7) Epoch 1, batch 3250, loss[loss=0.418, simple_loss=0.4261, pruned_loss=0.205, over 4839.00 frames. ], tot_loss[loss=0.3916, simple_loss=0.3946, pruned_loss=0.1944, over 953298.01 frames. 
], batch size: 49, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 10:59:36,036 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5845, 1.2207, 0.5980, 1.2076, 1.4478, 1.4589, 1.3786, 1.3404], + device='cuda:0'), covar=tensor([0.0658, 0.0594, 0.0589, 0.0745, 0.0394, 0.0698, 0.0645, 0.0892], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0024, 0.0031, 0.0021, 0.0031, 0.0030, 0.0033], + device='cuda:0'), out_proj_covar=tensor([0.0048, 0.0043, 0.0037, 0.0049, 0.0036, 0.0048, 0.0047, 0.0051], + device='cuda:0') +2023-04-26 10:59:47,322 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3288.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 10:59:50,300 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3293.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 10:59:57,216 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4261, 1.4130, 0.6377, 1.1424, 1.7456, 1.3353, 1.2872, 1.3180], + device='cuda:0'), covar=tensor([0.0619, 0.0554, 0.0571, 0.0711, 0.0376, 0.0697, 0.0623, 0.0834], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0026, 0.0024, 0.0031, 0.0021, 0.0031, 0.0030, 0.0033], + device='cuda:0'), out_proj_covar=tensor([0.0048, 0.0043, 0.0037, 0.0049, 0.0036, 0.0048, 0.0047, 0.0051], + device='cuda:0') +2023-04-26 11:00:00,689 INFO [finetune.py:976] (0/7) Epoch 1, batch 3300, loss[loss=0.3448, simple_loss=0.3511, pruned_loss=0.1693, over 4372.00 frames. ], tot_loss[loss=0.3953, simple_loss=0.399, pruned_loss=0.1958, over 954069.75 frames. ], batch size: 19, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:00:19,090 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.582e+02 2.400e+02 2.649e+02 3.193e+02 6.501e+02, threshold=5.297e+02, percent-clipped=1.0 +2023-04-26 11:00:21,995 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7178, 1.8019, 2.1047, 2.0901, 2.1404, 1.6173, 1.1592, 1.8302], + device='cuda:0'), covar=tensor([0.1253, 0.1168, 0.0739, 0.0921, 0.0732, 0.1330, 0.1454, 0.0850], + device='cuda:0'), in_proj_covar=tensor([0.0203, 0.0208, 0.0187, 0.0174, 0.0172, 0.0189, 0.0167, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:00:33,897 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3341.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:00:38,722 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3349.0, num_to_drop=2, layers_to_drop={1, 2} +2023-04-26 11:00:39,797 INFO [finetune.py:976] (0/7) Epoch 1, batch 3350, loss[loss=0.4296, simple_loss=0.4227, pruned_loss=0.2183, over 4801.00 frames. ], tot_loss[loss=0.394, simple_loss=0.3996, pruned_loss=0.1943, over 953402.24 frames. ], batch size: 51, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:00:57,399 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3375.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:01:05,772 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=3389.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:01:13,021 INFO [finetune.py:976] (0/7) Epoch 1, batch 3400, loss[loss=0.3655, simple_loss=0.385, pruned_loss=0.173, over 4843.00 frames. ], tot_loss[loss=0.3906, simple_loss=0.3978, pruned_loss=0.1918, over 953507.16 frames. 
], batch size: 44, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:01:23,591 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.622e+02 2.298e+02 2.697e+02 3.198e+02 5.981e+02, threshold=5.394e+02, percent-clipped=4.0 +2023-04-26 11:01:30,349 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-26 11:01:57,868 INFO [finetune.py:976] (0/7) Epoch 1, batch 3450, loss[loss=0.4127, simple_loss=0.419, pruned_loss=0.2032, over 4830.00 frames. ], tot_loss[loss=0.3882, simple_loss=0.3969, pruned_loss=0.1897, over 952736.06 frames. ], batch size: 49, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:02:35,981 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3480.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:02:49,068 INFO [finetune.py:976] (0/7) Epoch 1, batch 3500, loss[loss=0.3681, simple_loss=0.3757, pruned_loss=0.1802, over 4864.00 frames. ], tot_loss[loss=0.3811, simple_loss=0.3914, pruned_loss=0.1854, over 954364.11 frames. ], batch size: 31, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:03:00,468 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.505e+02 2.494e+02 2.871e+02 4.319e+02 1.287e+03, threshold=5.742e+02, percent-clipped=13.0 +2023-04-26 11:03:07,524 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=3528.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:03:08,273 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-26 11:03:24,401 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3546.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 11:03:27,266 INFO [finetune.py:976] (0/7) Epoch 1, batch 3550, loss[loss=0.349, simple_loss=0.3667, pruned_loss=0.1656, over 4823.00 frames. ], tot_loss[loss=0.3768, simple_loss=0.387, pruned_loss=0.1833, over 955456.42 frames. ], batch size: 41, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:04:11,263 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3593.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:04:16,040 INFO [finetune.py:976] (0/7) Epoch 1, batch 3600, loss[loss=0.3792, simple_loss=0.3922, pruned_loss=0.1831, over 4930.00 frames. ], tot_loss[loss=0.3723, simple_loss=0.3833, pruned_loss=0.1807, over 954260.02 frames. 
], batch size: 38, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:04:19,854 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3607.0, num_to_drop=2, layers_to_drop={1, 2} +2023-04-26 11:04:25,844 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5489, 1.5153, 0.8502, 1.2369, 2.0777, 1.4490, 1.4025, 1.4333], + device='cuda:0'), covar=tensor([0.0677, 0.0564, 0.0565, 0.0712, 0.0398, 0.0679, 0.0649, 0.0850], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0021, 0.0031, 0.0030, 0.0033], + device='cuda:0'), out_proj_covar=tensor([0.0048, 0.0043, 0.0038, 0.0049, 0.0036, 0.0048, 0.0047, 0.0051], + device='cuda:0') +2023-04-26 11:04:26,350 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.597e+02 2.647e+02 3.095e+02 3.748e+02 7.550e+02, threshold=6.190e+02, percent-clipped=4.0 +2023-04-26 11:04:42,906 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=3641.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:04:44,763 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3644.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 11:04:48,954 INFO [finetune.py:976] (0/7) Epoch 1, batch 3650, loss[loss=0.3201, simple_loss=0.3513, pruned_loss=0.1444, over 4753.00 frames. ], tot_loss[loss=0.3712, simple_loss=0.3835, pruned_loss=0.1795, over 954841.35 frames. ], batch size: 28, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:05:04,828 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3675.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:05:05,419 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3676.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 11:05:06,146 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-26 11:05:23,024 INFO [finetune.py:976] (0/7) Epoch 1, batch 3700, loss[loss=0.4026, simple_loss=0.4219, pruned_loss=0.1916, over 4813.00 frames. ], tot_loss[loss=0.371, simple_loss=0.3854, pruned_loss=0.1783, over 954153.37 frames. ], batch size: 40, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:05:30,863 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3152, 3.2240, 0.9759, 1.6819, 1.7880, 2.3996, 2.1099, 1.0319], + device='cuda:0'), covar=tensor([0.1363, 0.0900, 0.1867, 0.1332, 0.1038, 0.0933, 0.1224, 0.1918], + device='cuda:0'), in_proj_covar=tensor([0.0122, 0.0270, 0.0149, 0.0131, 0.0143, 0.0166, 0.0129, 0.0132], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 11:05:44,462 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.676e+02 2.590e+02 3.040e+02 3.768e+02 5.314e+02, threshold=6.080e+02, percent-clipped=0.0 +2023-04-26 11:05:53,055 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=3723.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:06:01,623 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-26 11:06:12,556 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3737.0, num_to_drop=2, layers_to_drop={2, 3} +2023-04-26 11:06:21,956 INFO [finetune.py:976] (0/7) Epoch 1, batch 3750, loss[loss=0.421, simple_loss=0.4232, pruned_loss=0.2094, over 4917.00 frames. ], tot_loss[loss=0.3712, simple_loss=0.3868, pruned_loss=0.1778, over 955741.16 frames. 
], batch size: 33, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:07:07,656 INFO [finetune.py:976] (0/7) Epoch 1, batch 3800, loss[loss=0.4349, simple_loss=0.4225, pruned_loss=0.2236, over 4787.00 frames. ], tot_loss[loss=0.3704, simple_loss=0.3871, pruned_loss=0.1768, over 955947.66 frames. ], batch size: 29, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:07:29,606 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.740e+02 2.601e+02 3.098e+02 3.867e+02 7.221e+02, threshold=6.196e+02, percent-clipped=5.0 +2023-04-26 11:08:03,205 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-26 11:08:06,735 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9054, 1.9342, 1.7205, 1.7452, 2.0931, 1.5681, 2.5012, 1.5165], + device='cuda:0'), covar=tensor([0.4150, 0.1361, 0.3963, 0.2380, 0.1451, 0.2398, 0.0831, 0.3871], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0325, 0.0396, 0.0336, 0.0370, 0.0346, 0.0363, 0.0377], + device='cuda:0'), out_proj_covar=tensor([9.6493e-05, 1.0020e-04, 1.2276e-04, 1.0497e-04, 1.1332e-04, 1.0566e-04, + 1.0980e-04, 1.1703e-04], device='cuda:0') +2023-04-26 11:08:14,577 INFO [finetune.py:976] (0/7) Epoch 1, batch 3850, loss[loss=0.3728, simple_loss=0.3883, pruned_loss=0.1787, over 4806.00 frames. ], tot_loss[loss=0.3653, simple_loss=0.3838, pruned_loss=0.1734, over 956113.65 frames. ], batch size: 51, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:08:27,373 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6595, 0.9619, 1.2382, 1.4756, 1.1736, 1.0490, 0.7611, 1.1051], + device='cuda:0'), covar=tensor([1.5416, 2.0957, 0.9612, 2.7917, 2.1419, 1.5922, 3.0462, 2.1708], + device='cuda:0'), in_proj_covar=tensor([0.0260, 0.0282, 0.0220, 0.0342, 0.0241, 0.0233, 0.0276, 0.0228], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 11:09:08,689 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.22 vs. limit=5.0 +2023-04-26 11:09:22,875 INFO [finetune.py:976] (0/7) Epoch 1, batch 3900, loss[loss=0.3089, simple_loss=0.3491, pruned_loss=0.1343, over 4865.00 frames. ], tot_loss[loss=0.3593, simple_loss=0.3778, pruned_loss=0.1704, over 956575.34 frames. ], batch size: 31, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:09:29,493 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=3902.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 11:09:44,795 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.635e+02 2.527e+02 3.007e+02 3.749e+02 8.787e+02, threshold=6.015e+02, percent-clipped=2.0 +2023-04-26 11:09:47,544 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-26 11:09:49,815 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=3926.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:10:02,211 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=3944.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 11:10:07,405 INFO [finetune.py:976] (0/7) Epoch 1, batch 3950, loss[loss=0.3492, simple_loss=0.3695, pruned_loss=0.1644, over 4874.00 frames. ], tot_loss[loss=0.3523, simple_loss=0.3719, pruned_loss=0.1664, over 955436.35 frames. 
], batch size: 34, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:10:16,469 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4548, 0.9727, 1.1505, 0.9957, 1.5652, 1.4365, 1.1466, 1.0958], + device='cuda:0'), covar=tensor([0.1121, 0.1513, 0.1920, 0.1688, 0.0635, 0.1032, 0.1165, 0.1710], + device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0345, 0.0345, 0.0310, 0.0356, 0.0378, 0.0327, 0.0365], + device='cuda:0'), out_proj_covar=tensor([7.2702e-05, 7.4403e-05, 7.4521e-05, 6.4876e-05, 7.5799e-05, 8.2867e-05, + 7.1258e-05, 7.8763e-05], device='cuda:0') +2023-04-26 11:10:26,976 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-26 11:10:30,543 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=3987.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:10:33,537 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=3992.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 11:10:40,441 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-4000.pt +2023-04-26 11:10:42,151 INFO [finetune.py:976] (0/7) Epoch 1, batch 4000, loss[loss=0.3667, simple_loss=0.3848, pruned_loss=0.1743, over 4816.00 frames. ], tot_loss[loss=0.3503, simple_loss=0.3704, pruned_loss=0.1651, over 956572.78 frames. ], batch size: 41, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:10:54,137 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.677e+02 2.386e+02 2.816e+02 3.337e+02 7.046e+02, threshold=5.633e+02, percent-clipped=3.0 +2023-04-26 11:11:02,810 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4032.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 11:11:13,086 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.06 vs. limit=5.0 +2023-04-26 11:11:15,866 INFO [finetune.py:976] (0/7) Epoch 1, batch 4050, loss[loss=0.3525, simple_loss=0.393, pruned_loss=0.156, over 4826.00 frames. ], tot_loss[loss=0.3527, simple_loss=0.3736, pruned_loss=0.1659, over 956746.06 frames. ], batch size: 33, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:11:41,924 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7609, 1.4866, 1.1887, 1.3729, 1.9917, 1.8056, 1.4819, 1.1939], + device='cuda:0'), covar=tensor([0.1322, 0.1497, 0.2557, 0.1969, 0.0867, 0.1315, 0.1820, 0.2142], + device='cuda:0'), in_proj_covar=tensor([0.0334, 0.0346, 0.0347, 0.0312, 0.0358, 0.0381, 0.0328, 0.0367], + device='cuda:0'), out_proj_covar=tensor([7.3165e-05, 7.4590e-05, 7.5012e-05, 6.5376e-05, 7.6235e-05, 8.3442e-05, + 7.1630e-05, 7.9289e-05], device='cuda:0') +2023-04-26 11:11:49,880 INFO [finetune.py:976] (0/7) Epoch 1, batch 4100, loss[loss=0.3229, simple_loss=0.3652, pruned_loss=0.1403, over 4813.00 frames. ], tot_loss[loss=0.3538, simple_loss=0.3757, pruned_loss=0.1659, over 957137.00 frames. ], batch size: 38, lr: 4.00e-03, grad_scale: 8.0 +2023-04-26 11:12:08,252 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.683e+02 2.322e+02 2.767e+02 3.338e+02 6.077e+02, threshold=5.534e+02, percent-clipped=1.0 +2023-04-26 11:12:34,738 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4141.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:12:41,361 INFO [finetune.py:976] (0/7) Epoch 1, batch 4150, loss[loss=0.3385, simple_loss=0.378, pruned_loss=0.1495, over 4849.00 frames. ], tot_loss[loss=0.352, simple_loss=0.3748, pruned_loss=0.1646, over 956498.41 frames. 
], batch size: 31, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:12:53,110 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-26 11:13:15,084 INFO [finetune.py:976] (0/7) Epoch 1, batch 4200, loss[loss=0.3124, simple_loss=0.3503, pruned_loss=0.1373, over 4809.00 frames. ], tot_loss[loss=0.3499, simple_loss=0.3742, pruned_loss=0.1628, over 955607.75 frames. ], batch size: 40, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:13:16,223 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4202.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 11:13:16,242 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4202.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:13:28,645 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.716e+02 2.286e+02 2.811e+02 3.257e+02 1.063e+03, threshold=5.622e+02, percent-clipped=1.0 +2023-04-26 11:13:33,195 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.97 vs. limit=5.0 +2023-04-26 11:14:05,464 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=4250.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 11:14:05,987 INFO [finetune.py:976] (0/7) Epoch 1, batch 4250, loss[loss=0.3146, simple_loss=0.3469, pruned_loss=0.1412, over 4891.00 frames. ], tot_loss[loss=0.3447, simple_loss=0.3695, pruned_loss=0.1599, over 955158.59 frames. ], batch size: 35, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:14:51,186 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4282.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:14:59,287 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=4288.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:15:07,280 INFO [finetune.py:976] (0/7) Epoch 1, batch 4300, loss[loss=0.2858, simple_loss=0.3318, pruned_loss=0.1199, over 4748.00 frames. ], tot_loss[loss=0.3402, simple_loss=0.3649, pruned_loss=0.1578, over 954143.85 frames. ], batch size: 28, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:15:19,787 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.630e+02 2.200e+02 2.676e+02 3.122e+02 6.239e+02, threshold=5.353e+02, percent-clipped=1.0 +2023-04-26 11:15:29,485 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4332.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 11:15:32,568 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3390, 2.6722, 1.0250, 1.4479, 2.1071, 1.3057, 3.7066, 1.7733], + device='cuda:0'), covar=tensor([0.0627, 0.0733, 0.0930, 0.1226, 0.0529, 0.1004, 0.0176, 0.0650], + device='cuda:0'), in_proj_covar=tensor([0.0057, 0.0073, 0.0054, 0.0050, 0.0056, 0.0056, 0.0088, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0012, 0.0008], + device='cuda:0') +2023-04-26 11:15:39,955 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=4349.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:15:41,062 INFO [finetune.py:976] (0/7) Epoch 1, batch 4350, loss[loss=0.345, simple_loss=0.3646, pruned_loss=0.1627, over 4748.00 frames. ], tot_loss[loss=0.3329, simple_loss=0.3586, pruned_loss=0.1536, over 955311.75 frames. 
], batch size: 54, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:15:41,212 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6007, 0.6142, 0.7235, 0.8860, 0.7952, 0.6603, 0.6618, 0.6826], + device='cuda:0'), covar=tensor([14.6931, 19.9920, 18.7411, 14.9107, 16.9651, 21.3519, 21.9806, 14.1377], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0478, 0.0545, 0.0519, 0.0444, 0.0502, 0.0509, 0.0517], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:15:52,448 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0939, 1.7459, 1.5285, 2.0362, 1.9266, 2.0560, 1.6972, 4.2810], + device='cuda:0'), covar=tensor([0.0751, 0.0757, 0.0812, 0.1208, 0.0653, 0.0689, 0.0738, 0.0114], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0040, 0.0041, 0.0046, 0.0041, 0.0040, 0.0041, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 11:16:02,409 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=4380.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 11:16:15,320 INFO [finetune.py:976] (0/7) Epoch 1, batch 4400, loss[loss=0.2867, simple_loss=0.3375, pruned_loss=0.1179, over 4785.00 frames. ], tot_loss[loss=0.3319, simple_loss=0.3577, pruned_loss=0.1531, over 953191.87 frames. ], batch size: 29, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:16:26,707 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.735e+02 2.527e+02 2.839e+02 3.555e+02 1.567e+03, threshold=5.678e+02, percent-clipped=5.0 +2023-04-26 11:16:29,142 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-26 11:16:48,904 INFO [finetune.py:976] (0/7) Epoch 1, batch 4450, loss[loss=0.3171, simple_loss=0.3519, pruned_loss=0.1412, over 4188.00 frames. ], tot_loss[loss=0.3368, simple_loss=0.3623, pruned_loss=0.1556, over 952915.72 frames. ], batch size: 65, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:17:19,616 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4497.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:17:22,031 INFO [finetune.py:976] (0/7) Epoch 1, batch 4500, loss[loss=0.332, simple_loss=0.3597, pruned_loss=0.1522, over 4824.00 frames. ], tot_loss[loss=0.3361, simple_loss=0.3633, pruned_loss=0.1545, over 954229.26 frames. ], batch size: 33, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:17:23,346 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5218, 1.4320, 0.8279, 1.2071, 1.8976, 1.4524, 1.3208, 1.4451], + device='cuda:0'), covar=tensor([0.0689, 0.0559, 0.0535, 0.0697, 0.0369, 0.0675, 0.0653, 0.0824], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0033], + device='cuda:0'), out_proj_covar=tensor([0.0048, 0.0043, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0051], + device='cuda:0') +2023-04-26 11:17:32,938 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.661e+02 2.259e+02 2.800e+02 3.318e+02 7.116e+02, threshold=5.601e+02, percent-clipped=2.0 +2023-04-26 11:18:15,784 INFO [finetune.py:976] (0/7) Epoch 1, batch 4550, loss[loss=0.3283, simple_loss=0.3781, pruned_loss=0.1393, over 4896.00 frames. ], tot_loss[loss=0.3339, simple_loss=0.3621, pruned_loss=0.1529, over 955082.22 frames. 
], batch size: 37, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:19:01,270 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4582.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:19:13,031 INFO [finetune.py:976] (0/7) Epoch 1, batch 4600, loss[loss=0.352, simple_loss=0.3745, pruned_loss=0.1647, over 4783.00 frames. ], tot_loss[loss=0.3314, simple_loss=0.3602, pruned_loss=0.1513, over 956405.12 frames. ], batch size: 29, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:19:23,133 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-26 11:19:23,471 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 2.361e+02 2.771e+02 3.480e+02 8.913e+02, threshold=5.542e+02, percent-clipped=3.0 +2023-04-26 11:19:33,262 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=4630.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:19:42,844 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=4644.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:19:47,128 INFO [finetune.py:976] (0/7) Epoch 1, batch 4650, loss[loss=0.2895, simple_loss=0.3214, pruned_loss=0.1288, over 4861.00 frames. ], tot_loss[loss=0.3277, simple_loss=0.3569, pruned_loss=0.1493, over 957181.36 frames. ], batch size: 34, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:19:55,392 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-26 11:19:56,549 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.7735, 0.7833, 0.9520, 0.9502, 0.8810, 0.8663, 0.9038, 0.9117], + device='cuda:0'), covar=tensor([13.9590, 20.7781, 20.3451, 17.7857, 16.3732, 21.6480, 22.8805, 14.2224], + device='cuda:0'), in_proj_covar=tensor([0.0418, 0.0480, 0.0550, 0.0523, 0.0445, 0.0504, 0.0509, 0.0518], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:20:24,064 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8075, 4.2499, 0.8125, 2.2375, 2.5235, 2.7341, 2.8991, 1.0583], + device='cuda:0'), covar=tensor([0.1268, 0.0678, 0.2232, 0.1277, 0.0868, 0.1138, 0.1072, 0.1888], + device='cuda:0'), in_proj_covar=tensor([0.0122, 0.0269, 0.0150, 0.0131, 0.0142, 0.0165, 0.0128, 0.0132], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 11:20:32,574 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6048, 3.5416, 0.8809, 1.9499, 2.0283, 2.4296, 2.2716, 1.0483], + device='cuda:0'), covar=tensor([0.1195, 0.0738, 0.2022, 0.1215, 0.0917, 0.1033, 0.1191, 0.1754], + device='cuda:0'), in_proj_covar=tensor([0.0122, 0.0269, 0.0150, 0.0131, 0.0142, 0.0165, 0.0128, 0.0132], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 11:20:42,427 INFO [finetune.py:976] (0/7) Epoch 1, batch 4700, loss[loss=0.258, simple_loss=0.2927, pruned_loss=0.1116, over 4919.00 frames. ], tot_loss[loss=0.321, simple_loss=0.3512, pruned_loss=0.1454, over 959305.44 frames. 
], batch size: 37, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:21:03,816 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.981e+02 2.474e+02 3.057e+02 6.452e+02, threshold=4.948e+02, percent-clipped=2.0 +2023-04-26 11:21:36,419 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-26 11:21:39,265 INFO [finetune.py:976] (0/7) Epoch 1, batch 4750, loss[loss=0.2999, simple_loss=0.3323, pruned_loss=0.1337, over 4790.00 frames. ], tot_loss[loss=0.3169, simple_loss=0.3476, pruned_loss=0.1431, over 958117.92 frames. ], batch size: 29, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:21:48,606 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1244, 1.0613, 1.2473, 1.3692, 1.1777, 1.2662, 1.2529, 1.2181], + device='cuda:0'), covar=tensor([13.5420, 22.1832, 25.3811, 16.3576, 14.4715, 23.2519, 25.7060, 15.3757], + device='cuda:0'), in_proj_covar=tensor([0.0419, 0.0482, 0.0552, 0.0526, 0.0446, 0.0505, 0.0511, 0.0520], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:22:08,658 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2645, 1.6127, 1.4685, 1.8708, 1.7039, 1.8258, 1.5650, 3.0127], + device='cuda:0'), covar=tensor([0.0639, 0.0683, 0.0758, 0.1215, 0.0625, 0.0483, 0.0650, 0.0229], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0040, 0.0041, 0.0046, 0.0042, 0.0041, 0.0041, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 11:22:11,020 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4797.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:22:13,545 INFO [finetune.py:976] (0/7) Epoch 1, batch 4800, loss[loss=0.3182, simple_loss=0.3269, pruned_loss=0.1548, over 3946.00 frames. ], tot_loss[loss=0.32, simple_loss=0.3506, pruned_loss=0.1447, over 955217.37 frames. ], batch size: 17, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:22:24,010 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 2.397e+02 2.799e+02 3.310e+02 5.685e+02, threshold=5.598e+02, percent-clipped=2.0 +2023-04-26 11:22:25,483 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-26 11:22:43,630 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=4845.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:22:47,741 INFO [finetune.py:976] (0/7) Epoch 1, batch 4850, loss[loss=0.2338, simple_loss=0.2867, pruned_loss=0.09045, over 4747.00 frames. ], tot_loss[loss=0.3219, simple_loss=0.3539, pruned_loss=0.1449, over 954790.25 frames. ], batch size: 26, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:23:01,514 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5203, 1.2958, 0.6619, 1.1784, 1.4634, 1.4176, 1.2699, 1.3121], + device='cuda:0'), covar=tensor([0.0641, 0.0512, 0.0573, 0.0660, 0.0388, 0.0605, 0.0592, 0.0787], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0033], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0043, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0051], + device='cuda:0') +2023-04-26 11:23:07,309 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. 
limit=2.0 +2023-04-26 11:23:25,551 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5522, 1.5400, 0.7708, 1.2368, 1.6954, 1.4609, 1.3301, 1.4266], + device='cuda:0'), covar=tensor([0.0645, 0.0528, 0.0523, 0.0698, 0.0369, 0.0665, 0.0616, 0.0808], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0033], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0051], + device='cuda:0') +2023-04-26 11:23:33,607 INFO [finetune.py:976] (0/7) Epoch 1, batch 4900, loss[loss=0.3224, simple_loss=0.3612, pruned_loss=0.1418, over 4798.00 frames. ], tot_loss[loss=0.3212, simple_loss=0.3538, pruned_loss=0.1442, over 955552.13 frames. ], batch size: 51, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:23:49,558 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.488e+02 2.175e+02 2.572e+02 3.151e+02 5.494e+02, threshold=5.143e+02, percent-clipped=0.0 +2023-04-26 11:24:29,535 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=4944.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:24:41,249 INFO [finetune.py:976] (0/7) Epoch 1, batch 4950, loss[loss=0.3285, simple_loss=0.3528, pruned_loss=0.1521, over 4778.00 frames. ], tot_loss[loss=0.3215, simple_loss=0.3544, pruned_loss=0.1443, over 955436.89 frames. ], batch size: 26, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:24:53,216 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5980, 1.3069, 4.3154, 4.0461, 3.7764, 4.1125, 4.0614, 3.7974], + device='cuda:0'), covar=tensor([0.7000, 0.6208, 0.1035, 0.1588, 0.1096, 0.1484, 0.1249, 0.1453], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0319, 0.0455, 0.0463, 0.0382, 0.0438, 0.0345, 0.0408], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 11:24:53,917 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2192, 1.1358, 1.5406, 2.4139, 1.7902, 1.2717, 1.0446, 1.7571], + device='cuda:0'), covar=tensor([0.4138, 0.6368, 0.2865, 0.6743, 0.5627, 0.4087, 0.8309, 0.4828], + device='cuda:0'), in_proj_covar=tensor([0.0253, 0.0272, 0.0213, 0.0332, 0.0231, 0.0225, 0.0267, 0.0219], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 11:25:18,979 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=4992.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:25:26,585 INFO [finetune.py:976] (0/7) Epoch 1, batch 5000, loss[loss=0.3431, simple_loss=0.3617, pruned_loss=0.1622, over 4846.00 frames. ], tot_loss[loss=0.3175, simple_loss=0.3511, pruned_loss=0.1419, over 956333.57 frames. 
], batch size: 47, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:25:37,462 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.472e+02 2.232e+02 2.673e+02 3.232e+02 7.029e+02, threshold=5.346e+02, percent-clipped=5.0 +2023-04-26 11:25:38,790 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5654, 3.6627, 0.8703, 2.1457, 2.0157, 2.5120, 2.4632, 1.1496], + device='cuda:0'), covar=tensor([0.1189, 0.0784, 0.1904, 0.1138, 0.1000, 0.1054, 0.1067, 0.1974], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0272, 0.0151, 0.0132, 0.0144, 0.0166, 0.0130, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 11:26:10,645 INFO [finetune.py:976] (0/7) Epoch 1, batch 5050, loss[loss=0.27, simple_loss=0.3134, pruned_loss=0.1133, over 4789.00 frames. ], tot_loss[loss=0.3142, simple_loss=0.3478, pruned_loss=0.1403, over 958251.33 frames. ], batch size: 29, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:26:33,151 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.22 vs. limit=5.0 +2023-04-26 11:26:59,437 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.28 vs. limit=5.0 +2023-04-26 11:27:11,310 INFO [finetune.py:976] (0/7) Epoch 1, batch 5100, loss[loss=0.2762, simple_loss=0.3033, pruned_loss=0.1245, over 4747.00 frames. ], tot_loss[loss=0.3089, simple_loss=0.3426, pruned_loss=0.1376, over 956870.80 frames. ], batch size: 54, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:27:40,805 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 2.109e+02 2.481e+02 2.814e+02 6.642e+02, threshold=4.963e+02, percent-clipped=1.0 +2023-04-26 11:27:41,503 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5294, 3.3872, 1.1578, 1.9568, 1.8445, 2.4031, 2.1337, 0.9795], + device='cuda:0'), covar=tensor([0.1240, 0.0926, 0.1736, 0.1259, 0.1089, 0.1040, 0.1369, 0.1995], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0270, 0.0150, 0.0131, 0.0143, 0.0165, 0.0129, 0.0133], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 11:28:17,913 INFO [finetune.py:976] (0/7) Epoch 1, batch 5150, loss[loss=0.2723, simple_loss=0.3246, pruned_loss=0.11, over 4791.00 frames. ], tot_loss[loss=0.3085, simple_loss=0.342, pruned_loss=0.1375, over 955759.21 frames. ], batch size: 51, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:28:38,404 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7150, 1.0607, 4.2141, 3.8828, 3.6831, 3.9248, 3.9068, 3.7383], + device='cuda:0'), covar=tensor([0.6714, 0.6489, 0.1073, 0.1831, 0.1254, 0.1449, 0.1765, 0.1565], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0318, 0.0451, 0.0460, 0.0380, 0.0436, 0.0343, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 11:29:07,358 INFO [finetune.py:976] (0/7) Epoch 1, batch 5200, loss[loss=0.2848, simple_loss=0.3393, pruned_loss=0.1152, over 4935.00 frames. ], tot_loss[loss=0.31, simple_loss=0.3446, pruned_loss=0.1378, over 954960.17 frames. 
], batch size: 33, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:29:21,206 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.499e+02 2.511e+02 2.846e+02 3.705e+02 8.558e+02, threshold=5.692e+02, percent-clipped=9.0 +2023-04-26 11:29:34,940 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9330, 1.1031, 1.1118, 1.1277, 1.2022, 1.3042, 1.1511, 1.1675], + device='cuda:0'), covar=tensor([ 4.9080, 13.0876, 8.6126, 7.0446, 7.2795, 10.9540, 10.8499, 8.7726], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0362, 0.0294, 0.0291, 0.0319, 0.0336, 0.0351, 0.0320], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 11:29:41,592 INFO [finetune.py:976] (0/7) Epoch 1, batch 5250, loss[loss=0.3595, simple_loss=0.394, pruned_loss=0.1625, over 4803.00 frames. ], tot_loss[loss=0.3119, simple_loss=0.3476, pruned_loss=0.1381, over 952585.43 frames. ], batch size: 45, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:30:15,015 INFO [finetune.py:976] (0/7) Epoch 1, batch 5300, loss[loss=0.3085, simple_loss=0.3508, pruned_loss=0.133, over 4811.00 frames. ], tot_loss[loss=0.3131, simple_loss=0.3492, pruned_loss=0.1385, over 953990.79 frames. ], batch size: 45, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:30:27,273 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.471e+02 2.212e+02 2.624e+02 3.137e+02 5.522e+02, threshold=5.248e+02, percent-clipped=0.0 +2023-04-26 11:30:49,038 INFO [finetune.py:976] (0/7) Epoch 1, batch 5350, loss[loss=0.3011, simple_loss=0.3391, pruned_loss=0.1315, over 4745.00 frames. ], tot_loss[loss=0.3098, simple_loss=0.3467, pruned_loss=0.1364, over 951501.48 frames. ], batch size: 54, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:30:54,030 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4454, 1.5265, 1.7058, 1.7820, 1.8209, 1.4571, 1.1045, 1.5975], + device='cuda:0'), covar=tensor([0.1101, 0.1117, 0.0684, 0.0710, 0.0704, 0.0999, 0.1272, 0.0722], + device='cuda:0'), in_proj_covar=tensor([0.0207, 0.0208, 0.0189, 0.0180, 0.0176, 0.0195, 0.0173, 0.0190], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:31:35,564 INFO [finetune.py:976] (0/7) Epoch 1, batch 5400, loss[loss=0.2996, simple_loss=0.327, pruned_loss=0.136, over 4818.00 frames. ], tot_loss[loss=0.3075, simple_loss=0.3441, pruned_loss=0.1354, over 953927.88 frames. ], batch size: 39, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:31:52,841 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.437e+02 2.058e+02 2.416e+02 3.015e+02 5.928e+02, threshold=4.831e+02, percent-clipped=1.0 +2023-04-26 11:32:15,535 INFO [finetune.py:976] (0/7) Epoch 1, batch 5450, loss[loss=0.2488, simple_loss=0.2787, pruned_loss=0.1095, over 4784.00 frames. ], tot_loss[loss=0.3019, simple_loss=0.3384, pruned_loss=0.1327, over 955631.26 frames. ], batch size: 23, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:32:49,240 INFO [finetune.py:976] (0/7) Epoch 1, batch 5500, loss[loss=0.2548, simple_loss=0.3013, pruned_loss=0.1041, over 4911.00 frames. ], tot_loss[loss=0.2971, simple_loss=0.3337, pruned_loss=0.1303, over 955137.75 frames. 
], batch size: 35, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:32:59,728 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.389e+02 2.158e+02 2.515e+02 3.075e+02 5.534e+02, threshold=5.030e+02, percent-clipped=1.0 +2023-04-26 11:33:46,364 INFO [finetune.py:976] (0/7) Epoch 1, batch 5550, loss[loss=0.338, simple_loss=0.3819, pruned_loss=0.147, over 4907.00 frames. ], tot_loss[loss=0.2975, simple_loss=0.334, pruned_loss=0.1305, over 956040.45 frames. ], batch size: 36, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:34:30,552 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-26 11:34:48,418 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8246, 4.4091, 0.8872, 2.2700, 2.3362, 2.7511, 2.7884, 1.0152], + device='cuda:0'), covar=tensor([0.1307, 0.0987, 0.2224, 0.1363, 0.0991, 0.1195, 0.1284, 0.1984], + device='cuda:0'), in_proj_covar=tensor([0.0124, 0.0272, 0.0150, 0.0132, 0.0143, 0.0166, 0.0130, 0.0133], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 11:34:50,261 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.93 vs. limit=5.0 +2023-04-26 11:34:53,186 INFO [finetune.py:976] (0/7) Epoch 1, batch 5600, loss[loss=0.3296, simple_loss=0.3586, pruned_loss=0.1503, over 4827.00 frames. ], tot_loss[loss=0.3006, simple_loss=0.3381, pruned_loss=0.1316, over 954940.98 frames. ], batch size: 30, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:35:13,911 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.486e+02 2.261e+02 2.635e+02 3.434e+02 7.043e+02, threshold=5.269e+02, percent-clipped=4.0 +2023-04-26 11:35:44,903 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5650.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 11:35:45,418 INFO [finetune.py:976] (0/7) Epoch 1, batch 5650, loss[loss=0.3648, simple_loss=0.3859, pruned_loss=0.1718, over 4057.00 frames. ], tot_loss[loss=0.3031, simple_loss=0.3411, pruned_loss=0.1325, over 954619.16 frames. ], batch size: 66, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:35:57,979 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4618, 1.6471, 1.7568, 1.8152, 1.9119, 1.4272, 1.1006, 1.7239], + device='cuda:0'), covar=tensor([0.1114, 0.1078, 0.0767, 0.0729, 0.0620, 0.1152, 0.1254, 0.0651], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0209, 0.0190, 0.0181, 0.0178, 0.0196, 0.0174, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:36:15,235 INFO [finetune.py:976] (0/7) Epoch 1, batch 5700, loss[loss=0.2085, simple_loss=0.2523, pruned_loss=0.08229, over 4617.00 frames. ], tot_loss[loss=0.2994, simple_loss=0.3358, pruned_loss=0.1315, over 934367.61 frames. 
], batch size: 20, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:36:21,278 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5711.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 11:36:31,941 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.970e+02 2.507e+02 3.043e+02 6.051e+02, threshold=5.014e+02, percent-clipped=1.0 +2023-04-26 11:36:44,043 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-1.pt +2023-04-26 11:37:01,400 INFO [finetune.py:976] (0/7) Epoch 2, batch 0, loss[loss=0.3281, simple_loss=0.3629, pruned_loss=0.1467, over 4837.00 frames. ], tot_loss[loss=0.3281, simple_loss=0.3629, pruned_loss=0.1467, over 4837.00 frames. ], batch size: 49, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:37:01,401 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 11:37:23,941 INFO [finetune.py:1010] (0/7) Epoch 2, validation: loss=0.2101, simple_loss=0.2777, pruned_loss=0.0712, over 2265189.00 frames. +2023-04-26 11:37:23,941 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6243MB +2023-04-26 11:37:46,588 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5755.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:38:02,076 INFO [finetune.py:976] (0/7) Epoch 2, batch 50, loss[loss=0.2648, simple_loss=0.3206, pruned_loss=0.1045, over 4818.00 frames. ], tot_loss[loss=0.304, simple_loss=0.3405, pruned_loss=0.1338, over 214140.87 frames. ], batch size: 38, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:38:07,498 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-04-26 11:38:16,560 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.11 vs. limit=5.0 +2023-04-26 11:38:17,916 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8938, 0.6597, 0.8657, 1.2590, 1.1621, 0.9149, 0.8641, 0.8533], + device='cuda:0'), covar=tensor([ 6.9577, 10.8067, 9.9028, 9.6498, 7.9845, 11.1821, 11.0855, 7.8294], + device='cuda:0'), in_proj_covar=tensor([0.0432, 0.0497, 0.0572, 0.0551, 0.0460, 0.0520, 0.0525, 0.0535], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:38:27,681 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5816.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:38:29,284 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.431e+02 2.066e+02 2.410e+02 2.890e+02 4.929e+02, threshold=4.819e+02, percent-clipped=0.0 +2023-04-26 11:38:29,361 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8251, 3.7733, 2.9247, 4.3457, 3.7659, 3.7689, 2.0348, 3.6841], + device='cuda:0'), covar=tensor([0.1729, 0.1114, 0.3019, 0.1417, 0.2251, 0.1835, 0.4929, 0.2026], + device='cuda:0'), in_proj_covar=tensor([0.0262, 0.0230, 0.0280, 0.0325, 0.0321, 0.0270, 0.0286, 0.0287], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 11:38:35,784 INFO [finetune.py:976] (0/7) Epoch 2, batch 100, loss[loss=0.3075, simple_loss=0.3416, pruned_loss=0.1367, over 4867.00 frames. ], tot_loss[loss=0.2944, simple_loss=0.3316, pruned_loss=0.1286, over 379154.57 frames. 
], batch size: 34, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:38:55,166 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5858.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:39:09,190 INFO [finetune.py:976] (0/7) Epoch 2, batch 150, loss[loss=0.2146, simple_loss=0.2548, pruned_loss=0.08721, over 4711.00 frames. ], tot_loss[loss=0.2871, simple_loss=0.3241, pruned_loss=0.125, over 507994.01 frames. ], batch size: 23, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:39:34,932 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.317e+02 2.049e+02 2.408e+02 2.939e+02 5.453e+02, threshold=4.816e+02, percent-clipped=1.0 +2023-04-26 11:39:35,687 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=5919.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:39:48,162 INFO [finetune.py:976] (0/7) Epoch 2, batch 200, loss[loss=0.2671, simple_loss=0.3147, pruned_loss=0.1098, over 4867.00 frames. ], tot_loss[loss=0.2857, simple_loss=0.3231, pruned_loss=0.1242, over 607768.53 frames. ], batch size: 31, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:40:52,013 INFO [finetune.py:976] (0/7) Epoch 2, batch 250, loss[loss=0.2514, simple_loss=0.3029, pruned_loss=0.09999, over 4900.00 frames. ], tot_loss[loss=0.2908, simple_loss=0.3284, pruned_loss=0.1266, over 685009.13 frames. ], batch size: 35, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:41:06,887 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=5990.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:41:07,041 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-26 11:41:09,870 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-26 11:41:13,410 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-6000.pt +2023-04-26 11:41:18,229 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6006.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 11:41:25,491 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 2.171e+02 2.695e+02 3.137e+02 1.432e+03, threshold=5.390e+02, percent-clipped=4.0 +2023-04-26 11:41:33,100 INFO [finetune.py:976] (0/7) Epoch 2, batch 300, loss[loss=0.2791, simple_loss=0.3158, pruned_loss=0.1212, over 4771.00 frames. ], tot_loss[loss=0.294, simple_loss=0.3319, pruned_loss=0.1281, over 744957.48 frames. ], batch size: 26, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:41:49,303 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6043.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:41:53,454 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. 
limit=2.0 +2023-04-26 11:41:54,623 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6051.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:42:11,628 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0446, 2.4863, 1.0560, 1.4219, 1.8539, 1.2112, 3.2653, 1.5959], + device='cuda:0'), covar=tensor([0.0751, 0.0810, 0.1034, 0.1217, 0.0600, 0.1079, 0.0248, 0.0670], + device='cuda:0'), in_proj_covar=tensor([0.0058, 0.0075, 0.0055, 0.0051, 0.0057, 0.0057, 0.0089, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0012, 0.0008], + device='cuda:0') +2023-04-26 11:42:22,356 INFO [finetune.py:976] (0/7) Epoch 2, batch 350, loss[loss=0.2857, simple_loss=0.3318, pruned_loss=0.1197, over 4923.00 frames. ], tot_loss[loss=0.2977, simple_loss=0.3366, pruned_loss=0.1294, over 792799.34 frames. ], batch size: 33, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:42:47,463 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6104.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:42:57,921 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6111.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:43:02,079 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.403e+02 2.085e+02 2.458e+02 2.985e+02 5.837e+02, threshold=4.917e+02, percent-clipped=2.0 +2023-04-26 11:43:14,250 INFO [finetune.py:976] (0/7) Epoch 2, batch 400, loss[loss=0.2891, simple_loss=0.3368, pruned_loss=0.1207, over 4884.00 frames. ], tot_loss[loss=0.2988, simple_loss=0.3385, pruned_loss=0.1296, over 825995.16 frames. ], batch size: 32, lr: 4.00e-03, grad_scale: 16.0 +2023-04-26 11:43:19,688 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6135.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:43:29,729 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0943, 1.7407, 2.4796, 2.5794, 1.7943, 1.4556, 1.9496, 1.1045], + device='cuda:0'), covar=tensor([0.1256, 0.1413, 0.0679, 0.1194, 0.1357, 0.1860, 0.1303, 0.2180], + device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0079, 0.0075, 0.0075, 0.0089, 0.0095, 0.0090, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 11:43:43,646 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6170.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:43:48,407 INFO [finetune.py:976] (0/7) Epoch 2, batch 450, loss[loss=0.3353, simple_loss=0.3635, pruned_loss=0.1536, over 4915.00 frames. ], tot_loss[loss=0.2971, simple_loss=0.3369, pruned_loss=0.1286, over 855159.84 frames. 
], batch size: 46, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:44:00,957 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6196.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:44:06,751 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6203.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:44:11,641 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6211.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:44:13,440 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6214.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:44:15,816 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.476e+02 2.104e+02 2.551e+02 3.094e+02 4.755e+02, threshold=5.103e+02, percent-clipped=0.0 +2023-04-26 11:44:21,877 INFO [finetune.py:976] (0/7) Epoch 2, batch 500, loss[loss=0.2389, simple_loss=0.2849, pruned_loss=0.09644, over 4804.00 frames. ], tot_loss[loss=0.2931, simple_loss=0.3333, pruned_loss=0.1265, over 877122.11 frames. ], batch size: 51, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:44:23,798 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6231.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:44:38,120 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-26 11:44:43,147 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9636, 1.3564, 3.2978, 3.0456, 3.0198, 3.2354, 3.2527, 2.9453], + device='cuda:0'), covar=tensor([0.6825, 0.5307, 0.1429, 0.2181, 0.1329, 0.1560, 0.1418, 0.1689], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0319, 0.0456, 0.0465, 0.0381, 0.0441, 0.0347, 0.0405], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 11:44:46,834 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6264.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:44:48,004 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6266.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:44:51,706 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6272.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:44:52,319 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0417, 2.0752, 1.7409, 1.7114, 2.1963, 1.7442, 2.6225, 1.5360], + device='cuda:0'), covar=tensor([0.4915, 0.2015, 0.5564, 0.3506, 0.2155, 0.2925, 0.1557, 0.4803], + device='cuda:0'), in_proj_covar=tensor([0.0335, 0.0344, 0.0426, 0.0360, 0.0395, 0.0366, 0.0390, 0.0401], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:44:55,242 INFO [finetune.py:976] (0/7) Epoch 2, batch 550, loss[loss=0.2708, simple_loss=0.3138, pruned_loss=0.1139, over 4760.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.3291, pruned_loss=0.1243, over 896336.78 frames. 
], batch size: 28, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:45:14,994 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6306.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 11:45:15,668 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8233, 1.1852, 1.5015, 1.7725, 1.3260, 1.1893, 0.8003, 1.2579], + device='cuda:0'), covar=tensor([0.6456, 0.8433, 0.4018, 0.9779, 0.9356, 0.6605, 1.2558, 0.8326], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0279, 0.0221, 0.0343, 0.0237, 0.0233, 0.0273, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 11:45:28,233 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.445e+02 2.269e+02 2.692e+02 3.237e+02 6.108e+02, threshold=5.385e+02, percent-clipped=2.0 +2023-04-26 11:45:37,842 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6327.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:45:38,337 INFO [finetune.py:976] (0/7) Epoch 2, batch 600, loss[loss=0.2971, simple_loss=0.3394, pruned_loss=0.1274, over 4876.00 frames. ], tot_loss[loss=0.2889, simple_loss=0.3289, pruned_loss=0.1244, over 911679.84 frames. ], batch size: 34, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:45:39,653 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3936, 1.2862, 3.7910, 3.5440, 3.4124, 3.5846, 3.6229, 3.3692], + device='cuda:0'), covar=tensor([0.6933, 0.5891, 0.1139, 0.1863, 0.1246, 0.2089, 0.1881, 0.1647], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0319, 0.0456, 0.0466, 0.0382, 0.0442, 0.0349, 0.0405], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 11:45:39,838 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-04-26 11:46:00,709 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6346.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:46:12,537 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6354.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 11:46:39,284 INFO [finetune.py:976] (0/7) Epoch 2, batch 650, loss[loss=0.2697, simple_loss=0.3222, pruned_loss=0.1086, over 4751.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3307, pruned_loss=0.1251, over 922024.67 frames. 
], batch size: 54, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:46:51,266 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2444, 1.2908, 1.2737, 1.3102, 1.3714, 1.5392, 1.3286, 1.3367], + device='cuda:0'), covar=tensor([3.6536, 9.4716, 6.7758, 5.1762, 5.7838, 8.3844, 9.3909, 7.3227], + device='cuda:0'), in_proj_covar=tensor([0.0268, 0.0372, 0.0298, 0.0299, 0.0327, 0.0351, 0.0359, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 11:46:53,605 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6399.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:47:02,007 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5510, 1.1223, 0.5351, 1.2008, 1.2192, 1.4560, 1.3170, 1.3084], + device='cuda:0'), covar=tensor([0.0660, 0.0557, 0.0571, 0.0710, 0.0389, 0.0665, 0.0682, 0.0799], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0033], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0051], + device='cuda:0') +2023-04-26 11:47:02,617 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6411.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:47:07,256 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.650e+02 2.377e+02 2.867e+02 3.488e+02 9.630e+02, threshold=5.734e+02, percent-clipped=2.0 +2023-04-26 11:47:13,371 INFO [finetune.py:976] (0/7) Epoch 2, batch 700, loss[loss=0.2897, simple_loss=0.3399, pruned_loss=0.1197, over 4770.00 frames. ], tot_loss[loss=0.2917, simple_loss=0.3323, pruned_loss=0.1256, over 929431.61 frames. ], batch size: 28, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:47:41,780 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5440, 1.6628, 0.7086, 1.2426, 1.7862, 1.4923, 1.3540, 1.4502], + device='cuda:0'), covar=tensor([0.0643, 0.0479, 0.0530, 0.0689, 0.0373, 0.0640, 0.0620, 0.0765], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0033], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0051], + device='cuda:0') +2023-04-26 11:47:46,400 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6459.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:47:48,754 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6462.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:47:50,533 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6699, 1.1854, 1.2403, 1.2365, 1.8272, 1.5926, 1.2204, 1.2680], + device='cuda:0'), covar=tensor([0.1390, 0.1857, 0.2294, 0.2022, 0.0909, 0.1687, 0.2457, 0.1982], + device='cuda:0'), in_proj_covar=tensor([0.0328, 0.0343, 0.0347, 0.0316, 0.0353, 0.0378, 0.0327, 0.0360], + device='cuda:0'), out_proj_covar=tensor([7.1776e-05, 7.3933e-05, 7.4926e-05, 6.6265e-05, 7.5364e-05, 8.2895e-05, + 7.1162e-05, 7.7758e-05], device='cuda:0') +2023-04-26 11:47:51,739 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6467.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:47:58,779 INFO [finetune.py:976] (0/7) Epoch 2, batch 750, loss[loss=0.344, simple_loss=0.3835, pruned_loss=0.1523, over 4718.00 frames. ], tot_loss[loss=0.2905, simple_loss=0.3317, pruned_loss=0.1246, over 934503.41 frames. 
], batch size: 54, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:48:18,135 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6491.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:48:30,391 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-26 11:48:50,919 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6514.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:48:53,698 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.458e+02 2.088e+02 2.383e+02 2.881e+02 6.100e+02, threshold=4.765e+02, percent-clipped=1.0 +2023-04-26 11:49:03,669 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6523.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 11:49:05,425 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6526.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:49:06,607 INFO [finetune.py:976] (0/7) Epoch 2, batch 800, loss[loss=0.25, simple_loss=0.303, pruned_loss=0.09847, over 4805.00 frames. ], tot_loss[loss=0.2886, simple_loss=0.3309, pruned_loss=0.1231, over 939933.88 frames. ], batch size: 25, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:49:06,725 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6528.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:49:26,688 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6559.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:49:29,549 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6562.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:49:33,145 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6567.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:49:38,650 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-26 11:49:40,229 INFO [finetune.py:976] (0/7) Epoch 2, batch 850, loss[loss=0.2488, simple_loss=0.3081, pruned_loss=0.09474, over 4819.00 frames. ], tot_loss[loss=0.2855, simple_loss=0.3276, pruned_loss=0.1217, over 942554.21 frames. ], batch size: 41, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:49:47,060 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0067, 1.2830, 5.0098, 4.6078, 4.4377, 4.7119, 4.4973, 4.4371], + device='cuda:0'), covar=tensor([0.6565, 0.6356, 0.1006, 0.1872, 0.1095, 0.1175, 0.1458, 0.1512], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0317, 0.0452, 0.0460, 0.0378, 0.0437, 0.0346, 0.0402], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 11:50:18,614 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.341e+02 2.044e+02 2.405e+02 3.038e+02 6.612e+02, threshold=4.810e+02, percent-clipped=4.0 +2023-04-26 11:50:21,172 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6622.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:50:25,267 INFO [finetune.py:976] (0/7) Epoch 2, batch 900, loss[loss=0.2607, simple_loss=0.2861, pruned_loss=0.1176, over 4249.00 frames. ], tot_loss[loss=0.2805, simple_loss=0.3232, pruned_loss=0.1189, over 944548.33 frames. 
], batch size: 65, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:50:25,985 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6629.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:50:36,404 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6646.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:50:54,945 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0 +2023-04-26 11:50:58,407 INFO [finetune.py:976] (0/7) Epoch 2, batch 950, loss[loss=0.268, simple_loss=0.3162, pruned_loss=0.1099, over 4904.00 frames. ], tot_loss[loss=0.2773, simple_loss=0.3196, pruned_loss=0.1176, over 945749.09 frames. ], batch size: 43, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:51:06,474 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6690.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:51:08,883 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6694.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:51:11,946 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6699.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:51:41,854 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 2.213e+02 2.518e+02 3.022e+02 8.936e+02, threshold=5.037e+02, percent-clipped=4.0 +2023-04-26 11:51:48,517 INFO [finetune.py:976] (0/7) Epoch 2, batch 1000, loss[loss=0.3048, simple_loss=0.3343, pruned_loss=0.1376, over 4083.00 frames. ], tot_loss[loss=0.2807, simple_loss=0.3228, pruned_loss=0.1193, over 947664.61 frames. ], batch size: 65, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:52:11,813 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6747.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:52:21,484 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-04-26 11:52:23,203 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4910, 1.1026, 1.1709, 1.0932, 1.7162, 1.4292, 1.1592, 1.0959], + device='cuda:0'), covar=tensor([0.1406, 0.1701, 0.2188, 0.1889, 0.0820, 0.1563, 0.1972, 0.1980], + device='cuda:0'), in_proj_covar=tensor([0.0328, 0.0343, 0.0346, 0.0315, 0.0353, 0.0377, 0.0326, 0.0359], + device='cuda:0'), out_proj_covar=tensor([7.1805e-05, 7.3780e-05, 7.4688e-05, 6.6075e-05, 7.5302e-05, 8.2509e-05, + 7.1023e-05, 7.7449e-05], device='cuda:0') +2023-04-26 11:52:34,048 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-26 11:52:55,093 INFO [finetune.py:976] (0/7) Epoch 2, batch 1050, loss[loss=0.3274, simple_loss=0.3662, pruned_loss=0.1443, over 4858.00 frames. ], tot_loss[loss=0.284, simple_loss=0.3266, pruned_loss=0.1207, over 948908.31 frames. 
], batch size: 44, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:52:57,707 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4767, 1.1898, 1.5766, 1.5014, 1.3567, 1.0794, 1.2640, 0.8113], + device='cuda:0'), covar=tensor([0.0669, 0.1072, 0.0648, 0.0736, 0.0923, 0.1444, 0.0784, 0.1214], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0079, 0.0075, 0.0074, 0.0088, 0.0095, 0.0089, 0.0080], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 11:53:03,721 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6791.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:53:29,357 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2357, 1.3910, 1.0427, 1.4220, 1.3322, 1.0815, 1.2545, 0.9578], + device='cuda:0'), covar=tensor([0.1705, 0.1539, 0.1549, 0.1492, 0.2942, 0.1621, 0.1885, 0.2555], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0333, 0.0244, 0.0310, 0.0312, 0.0283, 0.0279, 0.0300], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:53:33,223 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.642e+02 2.078e+02 2.516e+02 2.901e+02 4.864e+02, threshold=5.033e+02, percent-clipped=0.0 +2023-04-26 11:53:33,305 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6818.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 11:53:42,743 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6823.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:53:44,637 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6826.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:53:51,480 INFO [finetune.py:976] (0/7) Epoch 2, batch 1100, loss[loss=0.2739, simple_loss=0.3211, pruned_loss=0.1134, over 4814.00 frames. ], tot_loss[loss=0.2849, simple_loss=0.3283, pruned_loss=0.1207, over 950898.23 frames. ], batch size: 45, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:54:04,439 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6839.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:54:16,585 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6859.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:54:22,442 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6867.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:54:25,967 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0636, 0.6461, 0.9154, 0.5844, 1.1766, 0.9100, 0.7025, 0.9878], + device='cuda:0'), covar=tensor([0.1882, 0.1973, 0.2305, 0.2348, 0.1081, 0.1657, 0.2069, 0.2041], + device='cuda:0'), in_proj_covar=tensor([0.0330, 0.0343, 0.0347, 0.0316, 0.0354, 0.0378, 0.0328, 0.0359], + device='cuda:0'), out_proj_covar=tensor([7.2192e-05, 7.3794e-05, 7.4858e-05, 6.6220e-05, 7.5392e-05, 8.2723e-05, + 7.1379e-05, 7.7604e-05], device='cuda:0') +2023-04-26 11:54:27,619 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6874.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:54:30,495 INFO [finetune.py:976] (0/7) Epoch 2, batch 1150, loss[loss=0.2476, simple_loss=0.2911, pruned_loss=0.1021, over 4743.00 frames. ], tot_loss[loss=0.2864, simple_loss=0.3298, pruned_loss=0.1215, over 952981.63 frames. 
], batch size: 26, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:54:30,611 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6878.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 11:54:37,853 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0450, 0.6397, 0.8921, 0.5921, 1.2160, 0.9161, 0.7653, 0.9958], + device='cuda:0'), covar=tensor([0.1873, 0.2136, 0.2527, 0.2572, 0.1111, 0.2044, 0.2102, 0.2243], + device='cuda:0'), in_proj_covar=tensor([0.0329, 0.0342, 0.0347, 0.0316, 0.0354, 0.0377, 0.0328, 0.0359], + device='cuda:0'), out_proj_covar=tensor([7.2142e-05, 7.3737e-05, 7.4807e-05, 6.6232e-05, 7.5358e-05, 8.2712e-05, + 7.1326e-05, 7.7616e-05], device='cuda:0') +2023-04-26 11:54:49,244 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6907.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:54:50,547 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=6909.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:54:53,708 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.83 vs. limit=5.0 +2023-04-26 11:54:54,156 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6915.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:54:56,910 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 2.231e+02 2.530e+02 3.126e+02 7.008e+02, threshold=5.060e+02, percent-clipped=3.0 +2023-04-26 11:54:57,220 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0 +2023-04-26 11:54:59,916 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=6922.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:55:04,519 INFO [finetune.py:976] (0/7) Epoch 2, batch 1200, loss[loss=0.2921, simple_loss=0.322, pruned_loss=0.1311, over 4779.00 frames. ], tot_loss[loss=0.2851, simple_loss=0.3285, pruned_loss=0.1209, over 953829.61 frames. 
], batch size: 26, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:55:11,769 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6939.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 11:55:17,682 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4678, 1.2246, 1.8248, 1.6737, 1.2967, 1.0629, 1.4784, 0.9965], + device='cuda:0'), covar=tensor([0.1052, 0.1243, 0.0660, 0.1293, 0.1486, 0.1703, 0.1165, 0.1535], + device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0080, 0.0076, 0.0075, 0.0088, 0.0095, 0.0090, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 11:55:31,682 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:55:31,749 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=6970.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:55:35,796 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4457, 1.4101, 1.4849, 1.4885, 1.4914, 1.6553, 1.6966, 1.5502], + device='cuda:0'), covar=tensor([1.7094, 4.1852, 2.8852, 2.4038, 2.8970, 4.3998, 3.7239, 3.0871], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0373, 0.0300, 0.0300, 0.0329, 0.0355, 0.0361, 0.0328], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 11:55:38,016 INFO [finetune.py:976] (0/7) Epoch 2, batch 1250, loss[loss=0.2531, simple_loss=0.2956, pruned_loss=0.1053, over 4905.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3232, pruned_loss=0.1181, over 955427.97 frames. ], batch size: 32, lr: 4.00e-03, grad_scale: 32.0 +2023-04-26 11:55:43,399 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=6985.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:56:04,202 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.519e+02 2.106e+02 2.592e+02 3.021e+02 5.805e+02, threshold=5.184e+02, percent-clipped=1.0 +2023-04-26 11:56:11,779 INFO [finetune.py:976] (0/7) Epoch 2, batch 1300, loss[loss=0.2752, simple_loss=0.3132, pruned_loss=0.1186, over 4829.00 frames. ], tot_loss[loss=0.2747, simple_loss=0.3183, pruned_loss=0.1156, over 957222.20 frames. ], batch size: 39, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 11:56:21,385 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-04-26 11:56:25,532 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7048.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 11:56:55,868 INFO [finetune.py:976] (0/7) Epoch 2, batch 1350, loss[loss=0.2619, simple_loss=0.3236, pruned_loss=0.1001, over 4835.00 frames. ], tot_loss[loss=0.2728, simple_loss=0.3168, pruned_loss=0.1144, over 957366.57 frames. 
], batch size: 47, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 11:57:28,624 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7109.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 11:57:39,841 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.388e+02 2.079e+02 2.494e+02 3.022e+02 7.754e+02, threshold=4.988e+02, percent-clipped=2.0 +2023-04-26 11:57:39,943 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7118.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 11:57:48,832 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7123.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:57:56,136 INFO [finetune.py:976] (0/7) Epoch 2, batch 1400, loss[loss=0.3155, simple_loss=0.352, pruned_loss=0.1395, over 4805.00 frames. ], tot_loss[loss=0.2758, simple_loss=0.3202, pruned_loss=0.1157, over 954785.30 frames. ], batch size: 45, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 11:58:25,958 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-26 11:58:36,323 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7163.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:58:38,096 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=7166.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:58:47,201 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=7171.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:58:51,414 INFO [finetune.py:976] (0/7) Epoch 2, batch 1450, loss[loss=0.3639, simple_loss=0.3934, pruned_loss=0.1672, over 4207.00 frames. ], tot_loss[loss=0.2787, simple_loss=0.3235, pruned_loss=0.117, over 955096.69 frames. ], batch size: 65, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 11:59:15,258 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9110, 2.0178, 1.9446, 1.6884, 2.1318, 1.6994, 2.7048, 1.6542], + device='cuda:0'), covar=tensor([0.3977, 0.1569, 0.4113, 0.2478, 0.1668, 0.2398, 0.1279, 0.3981], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0348, 0.0432, 0.0363, 0.0398, 0.0371, 0.0394, 0.0406], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 11:59:19,360 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.461e+02 2.205e+02 2.539e+02 2.992e+02 8.732e+02, threshold=5.079e+02, percent-clipped=2.0 +2023-04-26 11:59:23,174 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7224.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 11:59:24,333 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7662, 3.6726, 2.8172, 4.2845, 3.7046, 3.7058, 1.6939, 3.5838], + device='cuda:0'), covar=tensor([0.1522, 0.1088, 0.3161, 0.1645, 0.2615, 0.1825, 0.5255, 0.2124], + device='cuda:0'), in_proj_covar=tensor([0.0259, 0.0231, 0.0278, 0.0328, 0.0322, 0.0273, 0.0286, 0.0287], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 11:59:25,497 INFO [finetune.py:976] (0/7) Epoch 2, batch 1500, loss[loss=0.3196, simple_loss=0.3733, pruned_loss=0.1329, over 4896.00 frames. ], tot_loss[loss=0.2813, simple_loss=0.3258, pruned_loss=0.1184, over 956552.58 frames. 
], batch size: 43, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 11:59:29,685 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7234.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 11:59:45,522 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5295, 0.6945, 0.9523, 1.0496, 1.1702, 1.2815, 0.9318, 1.0438], + device='cuda:0'), covar=tensor([2.7914, 6.0753, 4.3477, 3.7661, 4.1715, 7.0075, 5.6806, 4.5487], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0373, 0.0299, 0.0299, 0.0328, 0.0356, 0.0360, 0.0328], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 11:59:51,525 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7265.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:59:57,747 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7275.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 11:59:59,532 INFO [finetune.py:976] (0/7) Epoch 2, batch 1550, loss[loss=0.2194, simple_loss=0.2706, pruned_loss=0.08407, over 4708.00 frames. ], tot_loss[loss=0.2808, simple_loss=0.3256, pruned_loss=0.118, over 953711.22 frames. ], batch size: 23, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:00:04,012 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7285.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:00:27,655 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.588e+02 2.228e+02 2.487e+02 3.016e+02 5.047e+02, threshold=4.974e+02, percent-clipped=0.0 +2023-04-26 12:00:33,829 INFO [finetune.py:976] (0/7) Epoch 2, batch 1600, loss[loss=0.2979, simple_loss=0.3344, pruned_loss=0.1307, over 4260.00 frames. ], tot_loss[loss=0.2797, simple_loss=0.3244, pruned_loss=0.1175, over 953833.49 frames. ], batch size: 66, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:00:35,744 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4281, 0.9150, 0.3906, 1.1081, 1.1680, 1.3276, 1.1999, 1.2130], + device='cuda:0'), covar=tensor([0.0620, 0.0521, 0.0569, 0.0661, 0.0383, 0.0623, 0.0597, 0.0726], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0048, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0051], + device='cuda:0') +2023-04-26 12:00:36,964 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=7333.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:00:38,891 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7336.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:00:58,926 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0 +2023-04-26 12:01:01,227 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-26 12:01:07,694 INFO [finetune.py:976] (0/7) Epoch 2, batch 1650, loss[loss=0.2336, simple_loss=0.2776, pruned_loss=0.09482, over 4835.00 frames. ], tot_loss[loss=0.2757, simple_loss=0.3202, pruned_loss=0.1156, over 955378.68 frames. 
], batch size: 44, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:01:25,944 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7404.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 12:01:28,247 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7407.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:01:34,835 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.570e+02 2.142e+02 2.529e+02 2.973e+02 5.158e+02, threshold=5.058e+02, percent-clipped=1.0 +2023-04-26 12:01:41,461 INFO [finetune.py:976] (0/7) Epoch 2, batch 1700, loss[loss=0.2944, simple_loss=0.3373, pruned_loss=0.1257, over 4832.00 frames. ], tot_loss[loss=0.2712, simple_loss=0.316, pruned_loss=0.1131, over 956399.98 frames. ], batch size: 47, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:02:06,077 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6924, 2.4004, 1.6446, 1.5037, 1.2470, 1.2961, 1.7181, 1.2396], + device='cuda:0'), covar=tensor([0.2514, 0.2196, 0.2607, 0.3370, 0.3966, 0.2885, 0.2115, 0.3102], + device='cuda:0'), in_proj_covar=tensor([0.0200, 0.0225, 0.0199, 0.0221, 0.0238, 0.0198, 0.0194, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 12:02:09,792 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7468.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:02:22,161 INFO [finetune.py:976] (0/7) Epoch 2, batch 1750, loss[loss=0.2509, simple_loss=0.3117, pruned_loss=0.09507, over 4944.00 frames. ], tot_loss[loss=0.2754, simple_loss=0.3198, pruned_loss=0.1155, over 956009.99 frames. ], batch size: 33, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:03:04,393 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6356, 2.3021, 1.4487, 1.4113, 1.1685, 1.2403, 1.5137, 1.0818], + device='cuda:0'), covar=tensor([0.2671, 0.2329, 0.3038, 0.3744, 0.4128, 0.3192, 0.2358, 0.3389], + device='cuda:0'), in_proj_covar=tensor([0.0200, 0.0225, 0.0199, 0.0220, 0.0237, 0.0198, 0.0194, 0.0210], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 12:03:11,385 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.401e+02 2.126e+02 2.582e+02 2.994e+02 4.895e+02, threshold=5.165e+02, percent-clipped=0.0 +2023-04-26 12:03:12,102 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7519.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 12:03:23,648 INFO [finetune.py:976] (0/7) Epoch 2, batch 1800, loss[loss=0.3399, simple_loss=0.3777, pruned_loss=0.1511, over 4866.00 frames. ], tot_loss[loss=0.2803, simple_loss=0.3248, pruned_loss=0.1179, over 954653.81 frames. 
], batch size: 44, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:03:27,300 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7534.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 12:03:45,092 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9165, 4.0869, 0.8247, 2.1076, 2.3269, 2.8196, 2.5530, 0.9830], + device='cuda:0'), covar=tensor([0.1178, 0.0926, 0.2263, 0.1296, 0.0952, 0.1065, 0.1443, 0.2206], + device='cuda:0'), in_proj_covar=tensor([0.0125, 0.0275, 0.0153, 0.0134, 0.0145, 0.0167, 0.0131, 0.0135], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 12:04:09,579 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6453, 1.1770, 1.3578, 1.4210, 1.2142, 1.1005, 0.5863, 1.0191], + device='cuda:0'), covar=tensor([0.6268, 0.6958, 0.3459, 0.6242, 0.7107, 0.5475, 0.9613, 0.6832], + device='cuda:0'), in_proj_covar=tensor([0.0266, 0.0281, 0.0223, 0.0348, 0.0239, 0.0236, 0.0274, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:04:12,460 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7565.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:04:31,077 INFO [finetune.py:976] (0/7) Epoch 2, batch 1850, loss[loss=0.2695, simple_loss=0.3035, pruned_loss=0.1177, over 4837.00 frames. ], tot_loss[loss=0.28, simple_loss=0.3251, pruned_loss=0.1175, over 957028.68 frames. ], batch size: 25, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:04:34,056 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=7582.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 12:04:55,473 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-26 12:05:07,095 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=7613.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:05:10,536 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.515e+02 2.096e+02 2.728e+02 3.385e+02 6.441e+02, threshold=5.455e+02, percent-clipped=3.0 +2023-04-26 12:05:14,362 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7624.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:05:16,718 INFO [finetune.py:976] (0/7) Epoch 2, batch 1900, loss[loss=0.2851, simple_loss=0.3375, pruned_loss=0.1164, over 4883.00 frames. ], tot_loss[loss=0.2796, simple_loss=0.3252, pruned_loss=0.117, over 956025.64 frames. ], batch size: 32, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:05:18,622 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7631.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:05:50,038 INFO [finetune.py:976] (0/7) Epoch 2, batch 1950, loss[loss=0.2682, simple_loss=0.3148, pruned_loss=0.1108, over 4818.00 frames. ], tot_loss[loss=0.2772, simple_loss=0.3229, pruned_loss=0.1157, over 954719.10 frames. ], batch size: 40, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:05:54,903 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7685.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:06:06,523 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7704.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 12:06:11,336 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. 
limit=2.0 +2023-04-26 12:06:17,408 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.610e+02 2.113e+02 2.438e+02 2.790e+02 7.164e+02, threshold=4.875e+02, percent-clipped=1.0 +2023-04-26 12:06:20,592 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-26 12:06:23,979 INFO [finetune.py:976] (0/7) Epoch 2, batch 2000, loss[loss=0.2705, simple_loss=0.3066, pruned_loss=0.1172, over 4826.00 frames. ], tot_loss[loss=0.2731, simple_loss=0.3186, pruned_loss=0.1137, over 957508.40 frames. ], batch size: 33, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:06:24,238 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-26 12:06:32,577 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.03 vs. limit=5.0 +2023-04-26 12:06:33,721 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7743.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:06:39,113 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=7752.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 12:06:42,162 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4739, 1.5639, 1.7483, 1.7604, 1.8787, 1.4037, 1.0559, 1.5768], + device='cuda:0'), covar=tensor([0.1165, 0.1209, 0.0719, 0.0780, 0.0736, 0.1125, 0.1210, 0.0832], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0208, 0.0190, 0.0182, 0.0179, 0.0198, 0.0175, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:06:46,777 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7763.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:06:57,780 INFO [finetune.py:976] (0/7) Epoch 2, batch 2050, loss[loss=0.2194, simple_loss=0.2744, pruned_loss=0.08224, over 4885.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3146, pruned_loss=0.1118, over 955832.27 frames. ], batch size: 32, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:07:13,084 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=7802.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:07:14,315 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7804.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:07:24,632 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.397e+02 2.038e+02 2.548e+02 3.001e+02 7.131e+02, threshold=5.096e+02, percent-clipped=2.0 +2023-04-26 12:07:25,356 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7819.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 12:07:31,148 INFO [finetune.py:976] (0/7) Epoch 2, batch 2100, loss[loss=0.3291, simple_loss=0.3544, pruned_loss=0.152, over 4717.00 frames. ], tot_loss[loss=0.2688, simple_loss=0.3141, pruned_loss=0.1118, over 954433.46 frames. 
], batch size: 59, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:07:43,907 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1108, 1.4022, 1.3655, 1.8130, 2.0603, 1.7682, 1.6208, 1.4740], + device='cuda:0'), covar=tensor([0.2091, 0.2215, 0.2348, 0.2319, 0.1628, 0.2377, 0.2685, 0.2079], + device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0341, 0.0346, 0.0317, 0.0354, 0.0373, 0.0326, 0.0358], + device='cuda:0'), out_proj_covar=tensor([7.1534e-05, 7.3482e-05, 7.4805e-05, 6.6457e-05, 7.5520e-05, 8.1832e-05, + 7.0946e-05, 7.7207e-05], device='cuda:0') +2023-04-26 12:07:54,037 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=7863.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:07:56,430 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=7867.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:08:15,064 INFO [finetune.py:976] (0/7) Epoch 2, batch 2150, loss[loss=0.293, simple_loss=0.3469, pruned_loss=0.1195, over 4902.00 frames. ], tot_loss[loss=0.2707, simple_loss=0.3167, pruned_loss=0.1124, over 954022.39 frames. ], batch size: 35, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:09:02,673 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 2.251e+02 2.703e+02 3.381e+02 5.777e+02, threshold=5.406e+02, percent-clipped=2.0 +2023-04-26 12:09:20,979 INFO [finetune.py:976] (0/7) Epoch 2, batch 2200, loss[loss=0.298, simple_loss=0.334, pruned_loss=0.131, over 4761.00 frames. ], tot_loss[loss=0.2735, simple_loss=0.3195, pruned_loss=0.1137, over 953650.75 frames. ], batch size: 54, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:09:22,954 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=7931.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:09:34,367 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6539, 2.1865, 1.6822, 2.0663, 1.7006, 1.5966, 1.7605, 1.3936], + device='cuda:0'), covar=tensor([0.2217, 0.1416, 0.1288, 0.1495, 0.2952, 0.1778, 0.2101, 0.2932], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0337, 0.0247, 0.0313, 0.0316, 0.0287, 0.0281, 0.0303], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:10:21,351 INFO [finetune.py:976] (0/7) Epoch 2, batch 2250, loss[loss=0.2832, simple_loss=0.3454, pruned_loss=0.1105, over 4822.00 frames. ], tot_loss[loss=0.2726, simple_loss=0.3191, pruned_loss=0.1131, over 952772.93 frames. 
], batch size: 38, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:10:22,015 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=7979.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:10:23,121 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=7980.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:10:34,044 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3997, 2.3557, 2.6326, 2.9635, 2.6613, 2.0866, 1.6042, 2.3064], + device='cuda:0'), covar=tensor([0.1134, 0.0992, 0.0628, 0.0697, 0.0714, 0.1158, 0.1316, 0.0775], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0211, 0.0192, 0.0184, 0.0182, 0.0200, 0.0178, 0.0195], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:10:35,282 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-8000.pt +2023-04-26 12:10:47,701 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.449e+02 2.028e+02 2.420e+02 2.875e+02 5.113e+02, threshold=4.840e+02, percent-clipped=0.0 +2023-04-26 12:10:55,295 INFO [finetune.py:976] (0/7) Epoch 2, batch 2300, loss[loss=0.3156, simple_loss=0.3532, pruned_loss=0.1391, over 4786.00 frames. ], tot_loss[loss=0.2719, simple_loss=0.3192, pruned_loss=0.1123, over 953636.61 frames. ], batch size: 51, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:11:00,485 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6048, 1.1140, 1.3516, 1.4629, 1.2111, 1.0605, 0.6032, 0.9905], + device='cuda:0'), covar=tensor([0.6416, 0.7685, 0.3753, 0.6235, 0.7741, 0.5789, 1.0950, 0.7431], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0283, 0.0226, 0.0352, 0.0241, 0.0239, 0.0277, 0.0225], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:11:18,749 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8063.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:11:28,368 INFO [finetune.py:976] (0/7) Epoch 2, batch 2350, loss[loss=0.2192, simple_loss=0.2778, pruned_loss=0.08032, over 4764.00 frames. ], tot_loss[loss=0.2678, simple_loss=0.3152, pruned_loss=0.1102, over 954713.67 frames. ], batch size: 26, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:11:30,210 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8310, 1.2340, 1.6001, 1.9761, 1.4662, 1.1942, 0.8975, 1.4160], + device='cuda:0'), covar=tensor([0.6378, 0.7566, 0.3629, 0.6178, 0.8041, 0.5735, 1.0399, 0.7257], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0282, 0.0226, 0.0352, 0.0241, 0.0238, 0.0277, 0.0224], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:11:31,413 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-26 12:11:42,661 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8099.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:11:43,389 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-04-26 12:11:50,385 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=8111.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:11:54,657 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 2.063e+02 2.510e+02 2.746e+02 4.758e+02, threshold=5.020e+02, percent-clipped=0.0 +2023-04-26 12:12:01,654 INFO [finetune.py:976] (0/7) Epoch 2, batch 2400, loss[loss=0.2691, simple_loss=0.3159, pruned_loss=0.1112, over 4791.00 frames. ], tot_loss[loss=0.2643, simple_loss=0.3114, pruned_loss=0.1086, over 955476.89 frames. ], batch size: 25, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:12:02,383 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5023, 1.4052, 1.5173, 1.8459, 1.9128, 1.4477, 1.0444, 1.5930], + device='cuda:0'), covar=tensor([0.1255, 0.1625, 0.1049, 0.0802, 0.0724, 0.1161, 0.1318, 0.0882], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0211, 0.0191, 0.0184, 0.0181, 0.0200, 0.0178, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:12:21,844 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8158.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:12:34,354 INFO [finetune.py:976] (0/7) Epoch 2, batch 2450, loss[loss=0.2389, simple_loss=0.2974, pruned_loss=0.09017, over 4930.00 frames. ], tot_loss[loss=0.2615, simple_loss=0.3085, pruned_loss=0.1073, over 956015.94 frames. ], batch size: 38, lr: 3.99e-03, grad_scale: 64.0 +2023-04-26 12:13:01,849 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.405e+02 2.093e+02 2.421e+02 2.814e+02 5.190e+02, threshold=4.843e+02, percent-clipped=1.0 +2023-04-26 12:13:07,998 INFO [finetune.py:976] (0/7) Epoch 2, batch 2500, loss[loss=0.2639, simple_loss=0.3234, pruned_loss=0.1022, over 4737.00 frames. ], tot_loss[loss=0.2651, simple_loss=0.3114, pruned_loss=0.1094, over 955710.71 frames. ], batch size: 59, lr: 3.99e-03, grad_scale: 64.0 +2023-04-26 12:13:49,849 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8265.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:14:03,522 INFO [finetune.py:976] (0/7) Epoch 2, batch 2550, loss[loss=0.2452, simple_loss=0.3168, pruned_loss=0.08676, over 4817.00 frames. ], tot_loss[loss=0.2687, simple_loss=0.3156, pruned_loss=0.1109, over 955030.80 frames. 
], batch size: 38, lr: 3.99e-03, grad_scale: 64.0 +2023-04-26 12:14:04,283 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7718, 1.8589, 2.1142, 2.1819, 2.2158, 1.7165, 1.4297, 1.8377], + device='cuda:0'), covar=tensor([0.1543, 0.1211, 0.0741, 0.0988, 0.0810, 0.1307, 0.1414, 0.0876], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0211, 0.0191, 0.0184, 0.0181, 0.0199, 0.0177, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:14:04,870 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8280.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:14:39,231 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0683, 1.2174, 1.6835, 2.3228, 1.5840, 1.2386, 1.1041, 1.5888], + device='cuda:0'), covar=tensor([0.6360, 0.7994, 0.3581, 0.7812, 0.9020, 0.6077, 1.1158, 0.8655], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0281, 0.0225, 0.0350, 0.0239, 0.0237, 0.0275, 0.0223], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:15:02,137 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.269e+02 2.032e+02 2.420e+02 2.910e+02 5.283e+02, threshold=4.841e+02, percent-clipped=1.0 +2023-04-26 12:15:13,541 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8326.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:15:20,191 INFO [finetune.py:976] (0/7) Epoch 2, batch 2600, loss[loss=0.2589, simple_loss=0.313, pruned_loss=0.1023, over 4815.00 frames. ], tot_loss[loss=0.2697, simple_loss=0.3169, pruned_loss=0.1112, over 954967.28 frames. ], batch size: 39, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:15:20,247 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=8328.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:16:03,726 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-26 12:16:04,712 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.1288, 3.1486, 2.7343, 3.6509, 3.1116, 3.0864, 1.6375, 3.1357], + device='cuda:0'), covar=tensor([0.1854, 0.1474, 0.3607, 0.2172, 0.2509, 0.1966, 0.4879, 0.2292], + device='cuda:0'), in_proj_covar=tensor([0.0261, 0.0234, 0.0278, 0.0328, 0.0323, 0.0273, 0.0288, 0.0288], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:16:17,384 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9869, 0.6740, 0.8389, 0.6804, 1.1030, 0.9404, 0.7365, 0.8561], + device='cuda:0'), covar=tensor([0.1382, 0.1599, 0.1735, 0.1654, 0.0979, 0.1312, 0.1533, 0.1691], + device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0338, 0.0345, 0.0314, 0.0351, 0.0370, 0.0322, 0.0355], + device='cuda:0'), out_proj_covar=tensor([7.0609e-05, 7.2729e-05, 7.4537e-05, 6.5882e-05, 7.4776e-05, 8.0935e-05, + 7.0221e-05, 7.6640e-05], device='cuda:0') +2023-04-26 12:16:23,383 INFO [finetune.py:976] (0/7) Epoch 2, batch 2650, loss[loss=0.2862, simple_loss=0.3341, pruned_loss=0.1191, over 4899.00 frames. ], tot_loss[loss=0.2727, simple_loss=0.3195, pruned_loss=0.113, over 952842.18 frames. 
], batch size: 36, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:16:49,115 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8399.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:16:59,288 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1440, 1.3400, 1.3293, 1.8030, 2.1300, 1.8448, 1.6877, 1.6428], + device='cuda:0'), covar=tensor([0.1763, 0.2490, 0.2645, 0.2647, 0.1734, 0.2525, 0.3198, 0.2173], + device='cuda:0'), in_proj_covar=tensor([0.0325, 0.0339, 0.0347, 0.0316, 0.0352, 0.0372, 0.0324, 0.0357], + device='cuda:0'), out_proj_covar=tensor([7.0972e-05, 7.3010e-05, 7.4965e-05, 6.6234e-05, 7.5061e-05, 8.1416e-05, + 7.0626e-05, 7.7046e-05], device='cuda:0') +2023-04-26 12:17:13,449 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.592e+02 2.071e+02 2.411e+02 3.038e+02 9.260e+02, threshold=4.823e+02, percent-clipped=4.0 +2023-04-26 12:17:19,412 INFO [finetune.py:976] (0/7) Epoch 2, batch 2700, loss[loss=0.2801, simple_loss=0.3277, pruned_loss=0.1162, over 4854.00 frames. ], tot_loss[loss=0.2717, simple_loss=0.3184, pruned_loss=0.1125, over 954168.74 frames. ], batch size: 44, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:17:32,535 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=8447.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:17:40,742 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8458.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:17:53,353 INFO [finetune.py:976] (0/7) Epoch 2, batch 2750, loss[loss=0.2538, simple_loss=0.3004, pruned_loss=0.1036, over 4884.00 frames. ], tot_loss[loss=0.2691, simple_loss=0.3149, pruned_loss=0.1117, over 954965.86 frames. ], batch size: 35, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:18:12,774 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=8506.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:18:21,016 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.402e+02 1.950e+02 2.341e+02 2.831e+02 5.180e+02, threshold=4.681e+02, percent-clipped=1.0 +2023-04-26 12:18:26,543 INFO [finetune.py:976] (0/7) Epoch 2, batch 2800, loss[loss=0.2284, simple_loss=0.2709, pruned_loss=0.09294, over 4740.00 frames. ], tot_loss[loss=0.2645, simple_loss=0.31, pruned_loss=0.1095, over 956353.11 frames. ], batch size: 59, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:19:00,281 INFO [finetune.py:976] (0/7) Epoch 2, batch 2850, loss[loss=0.3458, simple_loss=0.3737, pruned_loss=0.1589, over 4813.00 frames. ], tot_loss[loss=0.2613, simple_loss=0.3075, pruned_loss=0.1076, over 955782.18 frames. ], batch size: 45, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:19:42,388 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8694, 1.7894, 2.0016, 2.2169, 2.1297, 1.7453, 1.4415, 1.9285], + device='cuda:0'), covar=tensor([0.1063, 0.0993, 0.0585, 0.0702, 0.0703, 0.1050, 0.1186, 0.0667], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0210, 0.0189, 0.0182, 0.0180, 0.0198, 0.0176, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:19:44,284 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. 
limit=2.0 +2023-04-26 12:19:45,300 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.328e+02 2.166e+02 2.478e+02 2.838e+02 4.479e+02, threshold=4.956e+02, percent-clipped=0.0 +2023-04-26 12:19:52,096 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=8621.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:19:52,760 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4883, 1.4438, 1.6153, 1.8708, 1.8458, 1.4632, 1.0999, 1.7002], + device='cuda:0'), covar=tensor([0.1181, 0.1308, 0.0842, 0.0782, 0.0747, 0.1102, 0.1265, 0.0664], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0210, 0.0190, 0.0183, 0.0180, 0.0199, 0.0176, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:19:56,369 INFO [finetune.py:976] (0/7) Epoch 2, batch 2900, loss[loss=0.2991, simple_loss=0.3474, pruned_loss=0.1254, over 4905.00 frames. ], tot_loss[loss=0.2637, simple_loss=0.31, pruned_loss=0.1087, over 954908.32 frames. ], batch size: 43, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:21:07,310 INFO [finetune.py:976] (0/7) Epoch 2, batch 2950, loss[loss=0.3238, simple_loss=0.3282, pruned_loss=0.1597, over 4573.00 frames. ], tot_loss[loss=0.267, simple_loss=0.3135, pruned_loss=0.1102, over 954217.65 frames. ], batch size: 20, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:21:11,302 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-26 12:21:53,499 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-26 12:22:03,513 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 2.015e+02 2.424e+02 2.946e+02 5.788e+02, threshold=4.848e+02, percent-clipped=1.0 +2023-04-26 12:22:15,451 INFO [finetune.py:976] (0/7) Epoch 2, batch 3000, loss[loss=0.2873, simple_loss=0.3343, pruned_loss=0.1202, over 4721.00 frames. ], tot_loss[loss=0.2703, simple_loss=0.3169, pruned_loss=0.1119, over 955440.92 frames. ], batch size: 59, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:22:15,452 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 12:22:18,503 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4229, 1.4856, 3.8709, 3.5193, 3.4546, 3.6775, 3.8290, 3.3989], + device='cuda:0'), covar=tensor([0.7228, 0.5451, 0.1462, 0.2644, 0.1489, 0.1529, 0.0953, 0.1882], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0317, 0.0451, 0.0459, 0.0382, 0.0436, 0.0345, 0.0403], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 12:22:19,247 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3997, 2.9877, 1.0416, 1.6134, 1.8162, 2.2832, 1.9508, 0.9558], + device='cuda:0'), covar=tensor([0.1459, 0.0925, 0.1989, 0.1591, 0.1106, 0.1021, 0.1533, 0.1960], + device='cuda:0'), in_proj_covar=tensor([0.0125, 0.0275, 0.0153, 0.0134, 0.0145, 0.0168, 0.0131, 0.0136], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 12:22:32,548 INFO [finetune.py:1010] (0/7) Epoch 2, validation: loss=0.1863, simple_loss=0.2571, pruned_loss=0.0578, over 2265189.00 frames. 
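
The [zipformer.py:2441] dumps above report, for each attention head, the entropy of the attention weights (alongside covariance statistics of the input and output projections). As a rough illustration of the quantity being logged, the sketch below computes a per-head attention entropy; this is not the icefall code, and the function name and tensor shapes are assumptions:

```python
import torch

def attn_weights_entropy(attn_weights: torch.Tensor,
                         eps: float = 1.0e-20) -> torch.Tensor:
    """Per-head entropy of attention distributions.

    attn_weights: (num_heads, seq_len, seq_len); each row along the last
    dimension is a softmax distribution over attended-to positions.
    """
    # H = -sum(p * log p), computed per query position ...
    entropy = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    # ... then averaged over query positions: one scalar per head.
    return entropy.mean(dim=-1)

# Example: 8 heads over 16 positions, mirroring the 8-element tensors above.
weights = torch.softmax(torch.randn(8, 16, 16), dim=-1)
print(attn_weights_entropy(weights))  # shape (8,)
```

Low entropy for a head means sharply peaked attention; values near log(seq_len) mean nearly uniform attention, so these dumps make it easy to spot heads that have collapsed or never specialized.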
+2023-04-26 12:22:32,548 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6243MB +2023-04-26 12:22:38,460 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9024, 2.5955, 2.0409, 2.4370, 1.9510, 2.1297, 2.4384, 1.7588], + device='cuda:0'), covar=tensor([0.2992, 0.2057, 0.1451, 0.1872, 0.3338, 0.1713, 0.2413, 0.3409], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0338, 0.0247, 0.0313, 0.0318, 0.0288, 0.0281, 0.0304], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:23:01,303 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-26 12:23:09,065 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8760.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:23:21,915 INFO [finetune.py:976] (0/7) Epoch 2, batch 3050, loss[loss=0.2664, simple_loss=0.3145, pruned_loss=0.1091, over 4745.00 frames. ], tot_loss[loss=0.2715, simple_loss=0.3185, pruned_loss=0.1123, over 954207.51 frames. ], batch size: 26, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:23:29,092 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.5905, 3.6007, 2.6738, 4.0973, 3.5845, 3.5189, 1.5074, 3.5060], + device='cuda:0'), covar=tensor([0.1536, 0.1117, 0.3634, 0.1763, 0.2829, 0.1885, 0.5291, 0.2143], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0231, 0.0273, 0.0323, 0.0319, 0.0268, 0.0283, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:23:49,150 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.298e+02 2.073e+02 2.370e+02 3.091e+02 5.908e+02, threshold=4.740e+02, percent-clipped=3.0 +2023-04-26 12:23:50,990 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8821.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:23:56,176 INFO [finetune.py:976] (0/7) Epoch 2, batch 3100, loss[loss=0.2011, simple_loss=0.2597, pruned_loss=0.07119, over 4802.00 frames. ], tot_loss[loss=0.2665, simple_loss=0.3139, pruned_loss=0.1095, over 953250.08 frames. ], batch size: 51, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:24:02,177 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8875, 1.5776, 1.3498, 1.5310, 2.1545, 1.7730, 1.3782, 1.2961], + device='cuda:0'), covar=tensor([0.1670, 0.2009, 0.2616, 0.1855, 0.0973, 0.1950, 0.3150, 0.2330], + device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0342, 0.0350, 0.0317, 0.0354, 0.0375, 0.0326, 0.0359], + device='cuda:0'), out_proj_covar=tensor([7.1501e-05, 7.3458e-05, 7.5672e-05, 6.6498e-05, 7.5398e-05, 8.2034e-05, + 7.1021e-05, 7.7510e-05], device='cuda:0') +2023-04-26 12:24:02,198 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3328, 1.5738, 1.4018, 1.4862, 1.4870, 1.6123, 1.5409, 1.5406], + device='cuda:0'), covar=tensor([2.1010, 3.9238, 3.2399, 2.7766, 3.0189, 4.9549, 4.1360, 3.3352], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0378, 0.0303, 0.0305, 0.0333, 0.0368, 0.0365, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:24:29,317 INFO [finetune.py:976] (0/7) Epoch 2, batch 3150, loss[loss=0.2424, simple_loss=0.2935, pruned_loss=0.0956, over 4917.00 frames. ], tot_loss[loss=0.2647, simple_loss=0.3113, pruned_loss=0.1091, over 954781.69 frames. 
], batch size: 46, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:24:40,596 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9245, 3.8344, 2.7975, 4.4717, 3.9065, 3.8768, 1.7060, 3.8363], + device='cuda:0'), covar=tensor([0.1757, 0.1070, 0.2919, 0.1434, 0.2614, 0.1692, 0.5880, 0.2045], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0232, 0.0274, 0.0324, 0.0319, 0.0268, 0.0283, 0.0283], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:24:56,257 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.408e+02 2.116e+02 2.518e+02 3.010e+02 6.088e+02, threshold=5.037e+02, percent-clipped=1.0 +2023-04-26 12:24:57,586 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=8921.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:25:01,748 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8927.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:25:02,221 INFO [finetune.py:976] (0/7) Epoch 2, batch 3200, loss[loss=0.2847, simple_loss=0.3237, pruned_loss=0.1229, over 4823.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3065, pruned_loss=0.1062, over 956606.27 frames. ], batch size: 41, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:25:30,257 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=8969.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:25:34,667 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-26 12:25:41,812 INFO [finetune.py:976] (0/7) Epoch 2, batch 3250, loss[loss=0.3214, simple_loss=0.358, pruned_loss=0.1424, over 4747.00 frames. ], tot_loss[loss=0.2596, simple_loss=0.3067, pruned_loss=0.1063, over 956727.33 frames. ], batch size: 54, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:25:53,820 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=8986.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:25:55,566 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=8988.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:25:56,892 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0 +2023-04-26 12:26:27,405 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9013.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:26:37,526 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 2.159e+02 2.478e+02 3.003e+02 4.851e+02, threshold=4.955e+02, percent-clipped=0.0 +2023-04-26 12:26:48,556 INFO [finetune.py:976] (0/7) Epoch 2, batch 3300, loss[loss=0.2764, simple_loss=0.3346, pruned_loss=0.1091, over 4933.00 frames. ], tot_loss[loss=0.2641, simple_loss=0.3117, pruned_loss=0.1082, over 957271.30 frames. ], batch size: 38, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:27:08,775 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9047.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:27:26,248 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9074.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:27:28,528 INFO [finetune.py:976] (0/7) Epoch 2, batch 3350, loss[loss=0.295, simple_loss=0.3373, pruned_loss=0.1264, over 4747.00 frames. ], tot_loss[loss=0.2663, simple_loss=0.3144, pruned_loss=0.1091, over 955280.62 frames. 
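], batch size: 54, lr: 3.99e-03, grad_scale: 32.0

The recurring [optim.py:369] lines summarize the distribution of recent gradient norms (the five numbers are min/25%/median/75%/max), the clipping threshold derived from that distribution, and how often clipping fired. A minimal sketch of quartile-based clipping in this spirit follows; the class name, the window size, and the threshold rule (Clipping_scale times the median) are assumptions for illustration, not the actual optimizer in icefall:

```python
import collections
import torch

class QuartileClipper:
    """Toy quartile-based gradient clipping: keep a window of recent
    gradient norms and clip at clipping_scale * median."""

    def __init__(self, clipping_scale: float = 2.0, window: int = 400):
        self.clipping_scale = clipping_scale
        self.norms = collections.deque(maxlen=window)
        self.num_steps = 0
        self.num_clipped = 0

    def clip_(self, parameters):
        params = [p for p in parameters if p.grad is not None]
        if not params:
            return None
        # Total gradient norm over all parameters.
        norm = torch.sqrt(sum(p.grad.pow(2).sum() for p in params)).item()
        self.norms.append(norm)
        q = sorted(self.norms)
        # min / 25% / median / 75% / max, like the five numbers logged above.
        quartiles = [q[int(round(i * (len(q) - 1) / 4))] for i in range(5)]
        threshold = self.clipping_scale * quartiles[2]
        self.num_steps += 1
        if norm > threshold:
            self.num_clipped += 1
            for p in params:
                p.grad.mul_(threshold / norm)
        # percent-clipped in the log would be the last of these values.
        return quartiles, threshold, 100.0 * self.num_clipped / self.num_steps
```
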
+2023-04-26 12:28:00,086 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9116.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:28:07,186 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.418e+02 2.168e+02 2.587e+02 3.094e+02 5.996e+02, threshold=5.175e+02, percent-clipped=1.0 +2023-04-26 12:28:17,531 INFO [finetune.py:976] (0/7) Epoch 2, batch 3400, loss[loss=0.2821, simple_loss=0.3302, pruned_loss=0.117, over 4914.00 frames. ], tot_loss[loss=0.2683, simple_loss=0.3167, pruned_loss=0.1099, over 956650.65 frames. ], batch size: 38, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:28:56,066 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.70 vs. limit=5.0 +2023-04-26 12:29:01,795 INFO [finetune.py:976] (0/7) Epoch 2, batch 3450, loss[loss=0.3142, simple_loss=0.3453, pruned_loss=0.1415, over 4857.00 frames. ], tot_loss[loss=0.2666, simple_loss=0.3155, pruned_loss=0.1088, over 956241.44 frames. ], batch size: 44, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:29:11,329 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3194, 1.6561, 1.5261, 1.8716, 1.6715, 2.1656, 1.4490, 3.7151], + device='cuda:0'), covar=tensor([0.0720, 0.0775, 0.0799, 0.1285, 0.0689, 0.0542, 0.0779, 0.0194], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0041, 0.0042, 0.0047, 0.0042, 0.0041, 0.0041, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 12:29:29,510 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.432e+02 2.133e+02 2.511e+02 2.934e+02 6.196e+02, threshold=5.021e+02, percent-clipped=2.0 +2023-04-26 12:29:35,076 INFO [finetune.py:976] (0/7) Epoch 2, batch 3500, loss[loss=0.1762, simple_loss=0.2304, pruned_loss=0.06101, over 4756.00 frames. ], tot_loss[loss=0.2634, simple_loss=0.3119, pruned_loss=0.1075, over 955650.20 frames. ], batch size: 27, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:30:08,947 INFO [finetune.py:976] (0/7) Epoch 2, batch 3550, loss[loss=0.1944, simple_loss=0.2587, pruned_loss=0.06509, over 4783.00 frames. ], tot_loss[loss=0.2595, simple_loss=0.3082, pruned_loss=0.1054, over 956097.64 frames. ], batch size: 29, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:30:12,101 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9283.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:30:37,458 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 2.002e+02 2.435e+02 2.997e+02 6.904e+02, threshold=4.871e+02, percent-clipped=3.0 +2023-04-26 12:30:48,984 INFO [finetune.py:976] (0/7) Epoch 2, batch 3600, loss[loss=0.2667, simple_loss=0.3107, pruned_loss=0.1114, over 4823.00 frames. ], tot_loss[loss=0.2574, simple_loss=0.305, pruned_loss=0.1049, over 953700.61 frames. ], batch size: 41, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:31:03,419 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9342.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:31:44,881 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9369.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:31:55,586 INFO [finetune.py:976] (0/7) Epoch 2, batch 3650, loss[loss=0.2665, simple_loss=0.3288, pruned_loss=0.1021, over 4855.00 frames. ], tot_loss[loss=0.2576, simple_loss=0.3059, pruned_loss=0.1047, over 951111.02 frames.
], batch size: 49, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:32:25,432 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9414.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:32:27,067 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9416.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:32:28,804 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.507e+02 2.166e+02 2.483e+02 2.970e+02 6.745e+02, threshold=4.965e+02, percent-clipped=1.0 +2023-04-26 12:32:34,828 INFO [finetune.py:976] (0/7) Epoch 2, batch 3700, loss[loss=0.2942, simple_loss=0.349, pruned_loss=0.1197, over 4822.00 frames. ], tot_loss[loss=0.2594, simple_loss=0.3093, pruned_loss=0.1048, over 952956.20 frames. ], batch size: 45, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:32:58,812 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=9464.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:33:07,007 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9475.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:33:08,728 INFO [finetune.py:976] (0/7) Epoch 2, batch 3750, loss[loss=0.295, simple_loss=0.3401, pruned_loss=0.1249, over 4834.00 frames. ], tot_loss[loss=0.2616, simple_loss=0.3111, pruned_loss=0.1061, over 952571.52 frames. ], batch size: 44, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:33:45,960 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3739, 3.0959, 1.1889, 1.5036, 2.2903, 1.3198, 4.2050, 1.9124], + device='cuda:0'), covar=tensor([0.0701, 0.0740, 0.0960, 0.1375, 0.0573, 0.1135, 0.0226, 0.0698], + device='cuda:0'), in_proj_covar=tensor([0.0056, 0.0072, 0.0054, 0.0050, 0.0055, 0.0056, 0.0086, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 12:33:47,547 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.577e+02 2.140e+02 2.529e+02 3.098e+02 5.505e+02, threshold=5.058e+02, percent-clipped=1.0 +2023-04-26 12:33:59,191 INFO [finetune.py:976] (0/7) Epoch 2, batch 3800, loss[loss=0.26, simple_loss=0.3134, pruned_loss=0.1033, over 4784.00 frames. ], tot_loss[loss=0.2623, simple_loss=0.3123, pruned_loss=0.1062, over 952293.23 frames. ], batch size: 29, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:34:13,239 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4618, 2.4095, 2.7059, 2.9974, 2.8055, 2.1981, 1.7158, 2.4779], + device='cuda:0'), covar=tensor([0.1165, 0.1060, 0.0632, 0.0749, 0.0696, 0.1310, 0.1392, 0.0777], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0210, 0.0189, 0.0184, 0.0180, 0.0199, 0.0177, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:34:47,649 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0 +2023-04-26 12:34:55,619 INFO [finetune.py:976] (0/7) Epoch 2, batch 3850, loss[loss=0.2458, simple_loss=0.2875, pruned_loss=0.1021, over 4819.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3093, pruned_loss=0.1046, over 953818.35 frames. 
], batch size: 30, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:34:58,831 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9583.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:34:59,461 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9584.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:35:01,964 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1335, 1.7502, 2.3634, 2.5168, 1.7243, 1.4530, 2.1195, 1.0636], + device='cuda:0'), covar=tensor([0.1028, 0.1342, 0.0802, 0.1130, 0.1552, 0.1766, 0.1171, 0.1859], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0079, 0.0076, 0.0073, 0.0087, 0.0096, 0.0089, 0.0080], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 12:35:05,669 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2261, 0.5568, 0.8733, 1.4600, 1.3643, 1.0746, 1.0776, 0.9699], + device='cuda:0'), covar=tensor([2.9261, 4.0405, 4.6779, 5.3262, 3.2739, 4.7310, 4.9188, 3.5634], + device='cuda:0'), in_proj_covar=tensor([0.0449, 0.0508, 0.0600, 0.0594, 0.0481, 0.0526, 0.0537, 0.0548], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:35:15,163 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1602, 1.3319, 1.4129, 1.5081, 1.5023, 1.1017, 0.7782, 1.3488], + device='cuda:0'), covar=tensor([0.1158, 0.1442, 0.0924, 0.0836, 0.0779, 0.1240, 0.1396, 0.0817], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0211, 0.0190, 0.0184, 0.0181, 0.0200, 0.0177, 0.0195], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:35:22,631 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 2.119e+02 2.508e+02 2.913e+02 5.368e+02, threshold=5.017e+02, percent-clipped=1.0 +2023-04-26 12:35:29,152 INFO [finetune.py:976] (0/7) Epoch 2, batch 3900, loss[loss=0.2919, simple_loss=0.3271, pruned_loss=0.1284, over 4852.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3052, pruned_loss=0.1028, over 954812.97 frames. ], batch size: 49, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:35:38,160 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=9631.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:35:50,768 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9642.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:35:52,633 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9645.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:36:12,995 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9658.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:36:26,751 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=9669.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:36:38,403 INFO [finetune.py:976] (0/7) Epoch 2, batch 3950, loss[loss=0.2502, simple_loss=0.2983, pruned_loss=0.101, over 4912.00 frames. ], tot_loss[loss=0.253, simple_loss=0.3021, pruned_loss=0.1019, over 955143.57 frames. 
], batch size: 37, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:36:55,188 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=9690.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:37:29,749 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=9717.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:37:36,039 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.382e+02 1.910e+02 2.412e+02 2.849e+02 4.927e+02, threshold=4.824e+02, percent-clipped=0.0 +2023-04-26 12:37:36,172 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9719.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:37:47,956 INFO [finetune.py:976] (0/7) Epoch 2, batch 4000, loss[loss=0.2879, simple_loss=0.3393, pruned_loss=0.1183, over 4817.00 frames. ], tot_loss[loss=0.2536, simple_loss=0.3021, pruned_loss=0.1026, over 956252.47 frames. ], batch size: 38, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:37:58,336 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=9743.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:38:08,729 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1019, 0.5345, 0.8236, 1.3766, 1.2696, 0.9763, 0.9878, 0.9021], + device='cuda:0'), covar=tensor([2.9446, 4.2158, 4.4928, 5.5552, 3.1712, 4.9998, 4.7515, 3.4469], + device='cuda:0'), in_proj_covar=tensor([0.0447, 0.0507, 0.0598, 0.0594, 0.0480, 0.0527, 0.0534, 0.0547], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:38:15,573 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9770.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:38:20,831 INFO [finetune.py:976] (0/7) Epoch 2, batch 4050, loss[loss=0.2862, simple_loss=0.353, pruned_loss=0.1097, over 4805.00 frames. ], tot_loss[loss=0.2564, simple_loss=0.3055, pruned_loss=0.1036, over 956278.77 frames. ], batch size: 45, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:38:38,416 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=9804.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:38:47,822 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 2.123e+02 2.473e+02 2.970e+02 7.758e+02, threshold=4.946e+02, percent-clipped=3.0 +2023-04-26 12:38:54,760 INFO [finetune.py:976] (0/7) Epoch 2, batch 4100, loss[loss=0.2648, simple_loss=0.3294, pruned_loss=0.1001, over 4909.00 frames. ], tot_loss[loss=0.2608, simple_loss=0.3106, pruned_loss=0.1055, over 956546.69 frames. ], batch size: 37, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:39:34,299 INFO [finetune.py:976] (0/7) Epoch 2, batch 4150, loss[loss=0.2492, simple_loss=0.2807, pruned_loss=0.1088, over 4077.00 frames. ], tot_loss[loss=0.2611, simple_loss=0.311, pruned_loss=0.1056, over 956160.25 frames. ], batch size: 17, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:40:18,083 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.356e+02 2.059e+02 2.438e+02 3.061e+02 5.791e+02, threshold=4.877e+02, percent-clipped=3.0 +2023-04-26 12:40:29,031 INFO [finetune.py:976] (0/7) Epoch 2, batch 4200, loss[loss=0.2697, simple_loss=0.3115, pruned_loss=0.1139, over 4751.00 frames. ], tot_loss[loss=0.2592, simple_loss=0.3102, pruned_loss=0.1041, over 956916.13 frames. 
], batch size: 54, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:40:43,341 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=9940.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:41:15,724 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5290, 2.3721, 2.8964, 2.8971, 2.8045, 2.3452, 1.7084, 2.5378], + device='cuda:0'), covar=tensor([0.1143, 0.1031, 0.0536, 0.0803, 0.0699, 0.1134, 0.1406, 0.0695], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0213, 0.0192, 0.0186, 0.0182, 0.0202, 0.0179, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:41:36,131 INFO [finetune.py:976] (0/7) Epoch 2, batch 4250, loss[loss=0.2989, simple_loss=0.3311, pruned_loss=0.1334, over 4238.00 frames. ], tot_loss[loss=0.2565, simple_loss=0.3075, pruned_loss=0.1027, over 956423.16 frames. ], batch size: 65, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:42:09,284 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-10000.pt +2023-04-26 12:42:30,352 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10014.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:42:33,325 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.966e+02 2.336e+02 2.838e+02 4.911e+02, threshold=4.671e+02, percent-clipped=1.0 +2023-04-26 12:42:43,921 INFO [finetune.py:976] (0/7) Epoch 2, batch 4300, loss[loss=0.2462, simple_loss=0.2874, pruned_loss=0.1026, over 4222.00 frames. ], tot_loss[loss=0.2553, simple_loss=0.3055, pruned_loss=0.1026, over 957651.45 frames. ], batch size: 65, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:43:02,254 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-26 12:43:02,741 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10039.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:43:18,130 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10062.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 12:43:23,037 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10070.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:43:27,737 INFO [finetune.py:976] (0/7) Epoch 2, batch 4350, loss[loss=0.2634, simple_loss=0.3013, pruned_loss=0.1128, over 4729.00 frames. ], tot_loss[loss=0.2526, simple_loss=0.3021, pruned_loss=0.1015, over 957227.97 frames. 
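], batch size: 59, lr: 3.99e-03, grad_scale: 32.0

The [checkpoint.py:75] line above fires at batch 10000, consistent with a rolling checkpoint being written every save_every_n=2000 training batches. Below is a minimal sketch of such periodic saving using a plain torch.save; the helper name and the exact contents of the saved dict are illustrative, not the icefall checkpoint helpers:

```python
from pathlib import Path

import torch

def maybe_save_checkpoint(model, optimizer, batch_idx_train: int,
                          exp_dir: Path, save_every_n: int = 2000) -> None:
    """Write a rolling checkpoint every `save_every_n` training batches,
    producing files like exp_dir/checkpoint-10000.pt."""
    if batch_idx_train == 0 or batch_idx_train % save_every_n != 0:
        return
    filename = exp_dir / f"checkpoint-{batch_idx_train}.pt"
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "batch_idx_train": batch_idx_train,
        },
        filename,
    )
```
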
+2023-04-26 12:43:33,751 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1173, 3.2508, 0.8665, 1.5898, 1.4307, 2.2226, 1.9742, 1.0232], + device='cuda:0'), covar=tensor([0.1913, 0.1459, 0.2431, 0.1892, 0.1620, 0.1446, 0.1513, 0.2170], + device='cuda:0'), in_proj_covar=tensor([0.0125, 0.0272, 0.0152, 0.0132, 0.0144, 0.0166, 0.0129, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 12:43:41,994 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10099.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:43:42,663 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10100.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 12:43:54,962 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=10118.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:43:55,491 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.384e+02 2.038e+02 2.484e+02 2.915e+02 5.390e+02, threshold=4.968e+02, percent-clipped=2.0 +2023-04-26 12:43:58,056 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10123.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 12:44:00,980 INFO [finetune.py:976] (0/7) Epoch 2, batch 4400, loss[loss=0.2739, simple_loss=0.3274, pruned_loss=0.1102, over 4906.00 frames. ], tot_loss[loss=0.2546, simple_loss=0.3031, pruned_loss=0.1031, over 954000.23 frames. ], batch size: 35, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:44:34,791 INFO [finetune.py:976] (0/7) Epoch 2, batch 4450, loss[loss=0.2797, simple_loss=0.3313, pruned_loss=0.114, over 4812.00 frames. ], tot_loss[loss=0.2555, simple_loss=0.3049, pruned_loss=0.103, over 953936.87 frames. ], batch size: 51, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:45:03,113 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.522e+02 2.117e+02 2.495e+02 3.123e+02 6.793e+02, threshold=4.991e+02, percent-clipped=1.0 +2023-04-26 12:45:06,934 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2227, 1.3314, 1.4515, 1.5472, 1.5551, 1.6772, 1.5061, 1.5520], + device='cuda:0'), covar=tensor([1.6316, 3.3022, 2.6317, 2.2226, 2.5771, 4.2017, 3.2833, 2.8239], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0389, 0.0309, 0.0313, 0.0341, 0.0383, 0.0374, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:45:08,636 INFO [finetune.py:976] (0/7) Epoch 2, batch 4500, loss[loss=0.286, simple_loss=0.3244, pruned_loss=0.1238, over 4735.00 frames. ], tot_loss[loss=0.2578, simple_loss=0.3071, pruned_loss=0.1042, over 950874.71 frames.
], batch size: 54, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:45:12,403 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5102, 2.0843, 1.4872, 1.2962, 1.1977, 1.1975, 1.5047, 1.1571], + device='cuda:0'), covar=tensor([0.2511, 0.2131, 0.2639, 0.3228, 0.3962, 0.2934, 0.2103, 0.3095], + device='cuda:0'), in_proj_covar=tensor([0.0200, 0.0227, 0.0196, 0.0218, 0.0234, 0.0198, 0.0191, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 12:45:15,981 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10240.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:45:42,229 INFO [finetune.py:976] (0/7) Epoch 2, batch 4550, loss[loss=0.2448, simple_loss=0.303, pruned_loss=0.09329, over 4851.00 frames. ], tot_loss[loss=0.2583, simple_loss=0.3082, pruned_loss=0.1043, over 950501.40 frames. ], batch size: 44, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:45:48,409 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=10288.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:46:21,466 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10314.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:46:29,829 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.925e+02 2.197e+02 2.697e+02 5.130e+02, threshold=4.395e+02, percent-clipped=1.0 +2023-04-26 12:46:41,914 INFO [finetune.py:976] (0/7) Epoch 2, batch 4600, loss[loss=0.2707, simple_loss=0.3014, pruned_loss=0.12, over 4895.00 frames. ], tot_loss[loss=0.2581, simple_loss=0.3082, pruned_loss=0.104, over 951771.83 frames. ], batch size: 32, lr: 3.99e-03, grad_scale: 64.0 +2023-04-26 12:47:09,970 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=10362.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:47:29,153 INFO [finetune.py:976] (0/7) Epoch 2, batch 4650, loss[loss=0.2468, simple_loss=0.3036, pruned_loss=0.09498, over 4756.00 frames. ], tot_loss[loss=0.2537, simple_loss=0.304, pruned_loss=0.1017, over 953706.56 frames. 
], batch size: 54, lr: 3.99e-03, grad_scale: 64.0 +2023-04-26 12:47:50,960 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10395.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 12:47:58,929 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10399.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:48:12,486 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10409.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:48:13,087 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4155, 1.6787, 1.6297, 2.0793, 1.9034, 2.0528, 1.6556, 3.6761], + device='cuda:0'), covar=tensor([0.0667, 0.0677, 0.0731, 0.1058, 0.0571, 0.0742, 0.0705, 0.0183], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0041, 0.0042, 0.0047, 0.0042, 0.0041, 0.0041, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 12:48:24,000 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10418.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 12:48:25,137 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 2.119e+02 2.411e+02 2.762e+02 5.438e+02, threshold=4.823e+02, percent-clipped=2.0 +2023-04-26 12:48:25,865 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4044, 1.5015, 1.4050, 1.5527, 1.5020, 1.7744, 1.5884, 1.5572], + device='cuda:0'), covar=tensor([1.3379, 2.7013, 2.4167, 1.9714, 2.2397, 3.5703, 2.8706, 2.3525], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0390, 0.0310, 0.0314, 0.0342, 0.0385, 0.0375, 0.0339], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:48:35,916 INFO [finetune.py:976] (0/7) Epoch 2, batch 4700, loss[loss=0.2073, simple_loss=0.2664, pruned_loss=0.07408, over 4800.00 frames. ], tot_loss[loss=0.2506, simple_loss=0.3004, pruned_loss=0.1004, over 952241.95 frames. ], batch size: 25, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:48:53,259 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=10447.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:48:55,146 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8176, 2.2939, 1.8872, 2.2266, 1.6024, 1.8723, 2.0477, 1.5331], + device='cuda:0'), covar=tensor([0.2317, 0.1435, 0.1184, 0.1304, 0.3186, 0.1524, 0.2013, 0.3329], + device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0336, 0.0246, 0.0310, 0.0314, 0.0288, 0.0278, 0.0302], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:48:57,668 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10454.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:49:09,512 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10470.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:49:15,191 INFO [finetune.py:976] (0/7) Epoch 2, batch 4750, loss[loss=0.2058, simple_loss=0.2569, pruned_loss=0.07733, over 4721.00 frames. ], tot_loss[loss=0.2488, simple_loss=0.2981, pruned_loss=0.0997, over 952127.23 frames. ], batch size: 23, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:49:15,384 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. 
limit=2.0 +2023-04-26 12:49:19,610 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-26 12:49:25,581 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3610, 3.4161, 0.8762, 1.9199, 1.6701, 2.4446, 1.9440, 0.9663], + device='cuda:0'), covar=tensor([0.1786, 0.1548, 0.2710, 0.1581, 0.1458, 0.1433, 0.1807, 0.2457], + device='cuda:0'), in_proj_covar=tensor([0.0126, 0.0274, 0.0153, 0.0133, 0.0145, 0.0167, 0.0131, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 12:49:38,748 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7339, 1.5246, 1.7735, 1.9193, 1.8234, 1.6811, 1.7235, 1.6769], + device='cuda:0'), covar=tensor([2.7264, 3.6085, 4.1043, 4.6419, 2.9773, 4.4587, 4.3760, 3.3442], + device='cuda:0'), in_proj_covar=tensor([0.0447, 0.0506, 0.0599, 0.0594, 0.0479, 0.0522, 0.0533, 0.0544], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:49:40,413 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10515.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 12:49:43,424 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 1.957e+02 2.292e+02 2.912e+02 6.222e+02, threshold=4.583e+02, percent-clipped=3.0 +2023-04-26 12:49:49,337 INFO [finetune.py:976] (0/7) Epoch 2, batch 4800, loss[loss=0.2923, simple_loss=0.3426, pruned_loss=0.121, over 4815.00 frames. ], tot_loss[loss=0.2511, simple_loss=0.3004, pruned_loss=0.1008, over 951492.18 frames. ], batch size: 33, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:49:50,698 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5064, 1.8195, 1.3818, 1.1894, 1.1756, 1.1875, 1.3676, 1.1197], + device='cuda:0'), covar=tensor([0.2088, 0.1977, 0.2386, 0.2865, 0.3436, 0.2573, 0.1891, 0.2837], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0225, 0.0193, 0.0216, 0.0232, 0.0196, 0.0190, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 12:49:53,111 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-26 12:49:57,385 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.73 vs. limit=2.0 +2023-04-26 12:50:08,674 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1411, 2.2696, 2.5143, 2.7044, 2.7610, 1.9657, 1.5997, 2.2063], + device='cuda:0'), covar=tensor([0.1245, 0.1047, 0.0576, 0.0770, 0.0654, 0.1207, 0.1403, 0.0759], + device='cuda:0'), in_proj_covar=tensor([0.0215, 0.0214, 0.0192, 0.0187, 0.0183, 0.0203, 0.0180, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:50:23,189 INFO [finetune.py:976] (0/7) Epoch 2, batch 4850, loss[loss=0.2426, simple_loss=0.2947, pruned_loss=0.09529, over 4925.00 frames. ], tot_loss[loss=0.2554, simple_loss=0.3053, pruned_loss=0.1027, over 952133.11 frames. 
], batch size: 38, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:50:50,776 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 2.100e+02 2.315e+02 2.802e+02 4.090e+02, threshold=4.629e+02, percent-clipped=1.0 +2023-04-26 12:50:56,075 INFO [finetune.py:976] (0/7) Epoch 2, batch 4900, loss[loss=0.2943, simple_loss=0.3327, pruned_loss=0.128, over 4727.00 frames. ], tot_loss[loss=0.2566, simple_loss=0.3074, pruned_loss=0.1029, over 953747.77 frames. ], batch size: 59, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:51:46,564 INFO [finetune.py:976] (0/7) Epoch 2, batch 4950, loss[loss=0.2728, simple_loss=0.3214, pruned_loss=0.1121, over 4898.00 frames. ], tot_loss[loss=0.257, simple_loss=0.3083, pruned_loss=0.1028, over 952489.88 frames. ], batch size: 43, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:52:10,673 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10695.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:52:23,341 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5598, 1.3773, 0.7647, 1.2490, 1.6133, 1.4657, 1.3394, 1.4017], + device='cuda:0'), covar=tensor([0.0583, 0.0456, 0.0486, 0.0611, 0.0336, 0.0546, 0.0550, 0.0678], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0048, 0.0044, 0.0038, 0.0049, 0.0037, 0.0047, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 12:52:34,218 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5046, 1.1081, 1.2237, 1.0805, 1.6849, 1.3676, 1.0529, 1.1626], + device='cuda:0'), covar=tensor([0.1561, 0.1445, 0.2030, 0.1743, 0.0782, 0.1439, 0.2053, 0.1918], + device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0342, 0.0351, 0.0320, 0.0356, 0.0372, 0.0324, 0.0357], + device='cuda:0'), out_proj_covar=tensor([7.1449e-05, 7.3575e-05, 7.5878e-05, 6.7293e-05, 7.5914e-05, 8.1335e-05, + 7.0653e-05, 7.7230e-05], device='cuda:0') +2023-04-26 12:52:36,494 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=10718.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 12:52:43,790 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.438e+02 1.913e+02 2.320e+02 2.882e+02 5.075e+02, threshold=4.640e+02, percent-clipped=3.0 +2023-04-26 12:52:54,984 INFO [finetune.py:976] (0/7) Epoch 2, batch 5000, loss[loss=0.2607, simple_loss=0.2983, pruned_loss=0.1115, over 4730.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3049, pruned_loss=0.1008, over 954431.19 frames. 
], batch size: 23, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:53:16,590 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=10743.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:53:17,250 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10744.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:53:26,960 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9545, 1.3113, 5.1551, 4.7711, 4.5192, 4.8228, 4.5532, 4.5497], + device='cuda:0'), covar=tensor([0.6181, 0.5989, 0.0934, 0.1839, 0.0962, 0.1041, 0.1328, 0.1364], + device='cuda:0'), in_proj_covar=tensor([0.0333, 0.0312, 0.0445, 0.0449, 0.0378, 0.0432, 0.0339, 0.0399], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 12:53:29,936 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10765.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:53:30,544 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=10766.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 12:53:33,508 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8085, 1.8045, 2.0853, 2.2538, 2.3695, 1.7602, 1.4185, 1.8798], + device='cuda:0'), covar=tensor([0.1323, 0.1364, 0.0789, 0.0864, 0.0739, 0.1338, 0.1512, 0.0898], + device='cuda:0'), in_proj_covar=tensor([0.0213, 0.0213, 0.0191, 0.0186, 0.0182, 0.0202, 0.0179, 0.0196], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:53:42,302 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10774.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:53:45,586 INFO [finetune.py:976] (0/7) Epoch 2, batch 5050, loss[loss=0.2636, simple_loss=0.3042, pruned_loss=0.1115, over 4872.00 frames. ], tot_loss[loss=0.2509, simple_loss=0.3018, pruned_loss=0.1, over 954736.89 frames. ], batch size: 34, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:54:25,686 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10805.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:54:28,741 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=10810.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 12:54:40,507 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.942e+02 2.267e+02 2.719e+02 4.388e+02, threshold=4.533e+02, percent-clipped=0.0 +2023-04-26 12:54:51,195 INFO [finetune.py:976] (0/7) Epoch 2, batch 5100, loss[loss=0.2211, simple_loss=0.2727, pruned_loss=0.08477, over 4899.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.2973, pruned_loss=0.09766, over 950453.54 frames. ], batch size: 32, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:55:01,971 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10835.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:55:45,708 INFO [finetune.py:976] (0/7) Epoch 2, batch 5150, loss[loss=0.2495, simple_loss=0.3006, pruned_loss=0.09924, over 4833.00 frames. ], tot_loss[loss=0.2476, simple_loss=0.2977, pruned_loss=0.09869, over 951260.11 frames. 
], batch size: 33, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:55:50,599 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=10885.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 12:55:55,979 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3730, 1.6155, 1.3386, 1.5717, 1.5303, 1.7085, 1.5364, 1.4812], + device='cuda:0'), covar=tensor([1.9919, 3.8806, 3.1010, 2.4885, 2.6714, 4.2785, 3.7036, 3.2408], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0394, 0.0312, 0.0315, 0.0345, 0.0387, 0.0378, 0.0341], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:56:13,137 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.326e+02 2.074e+02 2.408e+02 2.882e+02 5.966e+02, threshold=4.815e+02, percent-clipped=3.0 +2023-04-26 12:56:15,751 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-26 12:56:18,493 INFO [finetune.py:976] (0/7) Epoch 2, batch 5200, loss[loss=0.2528, simple_loss=0.2807, pruned_loss=0.1125, over 4489.00 frames. ], tot_loss[loss=0.2492, simple_loss=0.2998, pruned_loss=0.09926, over 952137.39 frames. ], batch size: 19, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:56:20,497 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9224, 1.9094, 1.6010, 1.4963, 2.0299, 1.5736, 2.4504, 1.3888], + device='cuda:0'), covar=tensor([0.3846, 0.1480, 0.4595, 0.2818, 0.1578, 0.2293, 0.1257, 0.4412], + device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0350, 0.0434, 0.0367, 0.0402, 0.0374, 0.0399, 0.0413], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 12:56:21,668 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4147, 1.0164, 0.2984, 1.0963, 1.0156, 1.2936, 1.1487, 1.1828], + device='cuda:0'), covar=tensor([0.0595, 0.0525, 0.0565, 0.0651, 0.0394, 0.0614, 0.0620, 0.0720], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0030, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0048, 0.0044, 0.0038, 0.0049, 0.0037, 0.0047, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 12:56:31,966 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=10946.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 12:56:48,267 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3932, 0.9470, 0.4702, 1.0743, 1.1061, 1.2595, 1.1176, 1.1651], + device='cuda:0'), covar=tensor([0.0632, 0.0509, 0.0563, 0.0654, 0.0396, 0.0627, 0.0619, 0.0730], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 12:56:48,856 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6332, 3.4894, 2.7762, 4.1622, 3.4798, 3.6776, 1.5406, 3.5510], + device='cuda:0'), covar=tensor([0.1701, 0.1244, 0.3740, 0.1569, 0.2642, 0.1808, 0.5457, 0.2351], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0229, 0.0272, 0.0323, 0.0316, 0.0267, 0.0280, 0.0284], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:56:57,990 INFO [finetune.py:976] (0/7) Epoch 2, batch 5250, loss[loss=0.2357, simple_loss=0.2913, pruned_loss=0.09005, over 4812.00 frames. 
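], tot_loss[loss=0.2518, simple_loss=0.3031, pruned_loss=0.1003, over 954010.73 frames. ], batch size: 38, lr: 3.99e-03, grad_scale: 32.0

The [scaling.py:679] Whitening lines compare a per-module metric against a limit (e.g. metric=1.50 vs. limit=2.0). One plausible form of such a metric, sketched below, is mean(eig^2) / mean(eig)^2 of the grouped feature covariance: it equals 1.0 when the covariance is already proportional to the identity ("white") and grows as the eigenvalue spectrum spreads out. The exact definition in scaling.py may differ; the contiguous channel grouping here is an assumption:

```python
import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    """mean(eig^2) / mean(eig)^2 of per-group feature covariances,
    computed without an eigendecomposition.

    x: (num_frames, num_channels); channels split into num_groups groups.
    """
    num_frames, num_channels = x.shape
    d = num_channels // num_groups
    x = x.reshape(num_frames, num_groups, d).transpose(0, 1)  # (groups, frames, d)
    cov = x.transpose(1, 2) @ x / num_frames                  # (groups, d, d)
    mean_eig = cov.diagonal(dim1=1, dim2=2).mean(dim=-1)      # trace(C) / d
    mean_eig_sq = (cov * cov).sum(dim=(1, 2)) / d             # trace(C^2) / d, C symmetric
    return (mean_eig_sq / mean_eig.pow(2)).mean()

# E.g. for the "num_groups=8, num_channels=96" lines: x of shape (T, 96).
x = torch.randn(200, 96)
print(whitening_metric(x, num_groups=8))  # close to 1.0 for random features
```
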
+2023-04-26 12:57:40,222 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.533e+02 2.106e+02 2.454e+02 2.928e+02 5.017e+02, threshold=4.909e+02, percent-clipped=1.0 +2023-04-26 12:57:51,419 INFO [finetune.py:976] (0/7) Epoch 2, batch 5300, loss[loss=0.218, simple_loss=0.2812, pruned_loss=0.07738, over 4822.00 frames. ], tot_loss[loss=0.2532, simple_loss=0.3045, pruned_loss=0.101, over 954169.60 frames. ], batch size: 30, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:58:45,100 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11065.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:58:46,406 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0515, 1.3333, 1.7975, 2.4226, 1.7254, 1.3417, 1.2047, 1.6945], + device='cuda:0'), covar=tensor([0.6599, 0.7740, 0.3543, 0.6709, 0.7530, 0.5667, 0.9858, 0.7045], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0277, 0.0224, 0.0347, 0.0235, 0.0236, 0.0268, 0.0216], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 12:58:57,625 INFO [finetune.py:976] (0/7) Epoch 2, batch 5350, loss[loss=0.2563, simple_loss=0.3057, pruned_loss=0.1034, over 4206.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3044, pruned_loss=0.1, over 954729.82 frames. ], batch size: 65, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 12:59:05,262 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-26 12:59:29,028 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-26 12:59:29,509 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11100.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 12:59:47,721 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11110.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 12:59:50,021 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=11113.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:00:00,705 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.923e+02 2.240e+02 2.858e+02 6.419e+02, threshold=4.480e+02, percent-clipped=4.0 +2023-04-26 13:00:11,118 INFO [finetune.py:976] (0/7) Epoch 2, batch 5400, loss[loss=0.2795, simple_loss=0.3185, pruned_loss=0.1202, over 4934.00 frames. ], tot_loss[loss=0.2493, simple_loss=0.3013, pruned_loss=0.09862, over 955911.40 frames. ], batch size: 38, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:00:12,402 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11130.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:00:29,805 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-26 13:00:32,455 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=11158.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:00:42,730 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11174.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:00:45,043 INFO [finetune.py:976] (0/7) Epoch 2, batch 5450, loss[loss=0.2523, simple_loss=0.2976, pruned_loss=0.1035, over 4912.00 frames. ], tot_loss[loss=0.246, simple_loss=0.2978, pruned_loss=0.09715, over 955828.53 frames.
], batch size: 36, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:01:18,381 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.436e+02 1.901e+02 2.286e+02 2.895e+02 5.867e+02, threshold=4.573e+02, percent-clipped=4.0 +2023-04-26 13:01:23,718 INFO [finetune.py:976] (0/7) Epoch 2, batch 5500, loss[loss=0.2639, simple_loss=0.3197, pruned_loss=0.1041, over 4909.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.2958, pruned_loss=0.09648, over 956472.65 frames. ], batch size: 36, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:01:28,090 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11235.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:01:31,693 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11241.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 13:01:40,108 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2162, 1.5708, 1.2441, 1.4416, 1.3161, 1.1607, 1.3190, 1.0203], + device='cuda:0'), covar=tensor([0.2061, 0.1476, 0.1407, 0.1523, 0.3681, 0.1689, 0.2023, 0.2695], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0335, 0.0245, 0.0308, 0.0314, 0.0287, 0.0276, 0.0300], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:01:57,533 INFO [finetune.py:976] (0/7) Epoch 2, batch 5550, loss[loss=0.2271, simple_loss=0.2803, pruned_loss=0.08701, over 4802.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.296, pruned_loss=0.09693, over 954279.12 frames. ], batch size: 25, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:02:33,651 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.67 vs. limit=5.0 +2023-04-26 13:02:35,254 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.528e+02 2.026e+02 2.364e+02 2.674e+02 3.741e+02, threshold=4.728e+02, percent-clipped=0.0 +2023-04-26 13:02:39,953 INFO [finetune.py:976] (0/7) Epoch 2, batch 5600, loss[loss=0.2915, simple_loss=0.3231, pruned_loss=0.13, over 4150.00 frames. ], tot_loss[loss=0.2499, simple_loss=0.3015, pruned_loss=0.09914, over 951799.64 frames. ], batch size: 18, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:02:40,710 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-26 13:03:42,988 INFO [finetune.py:976] (0/7) Epoch 2, batch 5650, loss[loss=0.24, simple_loss=0.2918, pruned_loss=0.09407, over 4760.00 frames. ], tot_loss[loss=0.2523, simple_loss=0.3047, pruned_loss=0.09994, over 953197.83 frames. ], batch size: 59, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:04:08,639 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11400.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:04:31,185 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.867e+02 2.284e+02 2.863e+02 5.268e+02, threshold=4.568e+02, percent-clipped=1.0 +2023-04-26 13:04:35,905 INFO [finetune.py:976] (0/7) Epoch 2, batch 5700, loss[loss=0.2698, simple_loss=0.2945, pruned_loss=0.1226, over 3931.00 frames. ], tot_loss[loss=0.2497, simple_loss=0.3007, pruned_loss=0.09939, over 936566.26 frames. 
], batch size: 17, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:04:37,179 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11430.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:04:47,963 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=11448.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:04:48,718 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-26 13:04:53,015 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-2.pt +2023-04-26 13:05:07,855 INFO [finetune.py:976] (0/7) Epoch 3, batch 0, loss[loss=0.2326, simple_loss=0.2867, pruned_loss=0.08923, over 4798.00 frames. ], tot_loss[loss=0.2326, simple_loss=0.2867, pruned_loss=0.08923, over 4798.00 frames. ], batch size: 25, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:05:07,856 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 13:05:24,857 INFO [finetune.py:1010] (0/7) Epoch 3, validation: loss=0.1779, simple_loss=0.251, pruned_loss=0.05243, over 2265189.00 frames. +2023-04-26 13:05:24,858 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6243MB +2023-04-26 13:05:36,336 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-26 13:05:42,221 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=11478.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:05:44,871 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-26 13:05:47,163 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2671, 1.4742, 1.4349, 1.9681, 1.6799, 1.9001, 1.3728, 4.1551], + device='cuda:0'), covar=tensor([0.0795, 0.0946, 0.1001, 0.1427, 0.0788, 0.0751, 0.0937, 0.0174], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0041, 0.0042, 0.0046, 0.0042, 0.0041, 0.0041, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 13:05:51,331 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2470, 1.5416, 1.3960, 1.8068, 1.6301, 1.8906, 1.3851, 3.3794], + device='cuda:0'), covar=tensor([0.0723, 0.0798, 0.0849, 0.1295, 0.0697, 0.0528, 0.0799, 0.0179], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0041, 0.0042, 0.0046, 0.0042, 0.0041, 0.0041, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 13:06:01,722 INFO [finetune.py:976] (0/7) Epoch 3, batch 50, loss[loss=0.2757, simple_loss=0.3203, pruned_loss=0.1155, over 4905.00 frames. ], tot_loss[loss=0.2551, simple_loss=0.307, pruned_loss=0.1016, over 217674.11 frames. 
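], batch size: 37, lr: 3.99e-03, grad_scale: 32.0

The [zipformer.py:1188] lines track a per-stack warmup window (warmup_begin/warmup_end in batches) and report which whole encoder layers are skipped for the current batch (num_to_drop, layers_to_drop). A hedged sketch of such a layer-drop schedule is below; the initial and floor probabilities and the linear decay are assumptions, chosen only to reproduce the logged pattern of mostly num_to_drop=0 with an occasional layer dropped even late in training, and they are not the actual numbers in zipformer.py:

```python
import random

def pick_layers_to_drop(num_layers: int, batch_count: float,
                        warmup_begin: float, warmup_end: float,
                        initial_prob: float = 0.5,
                        final_prob: float = 0.05,
                        rng: random.Random = random.Random()) -> set:
    """Choose encoder layers to skip for one batch: the skip probability
    decays linearly across the stack's warmup window, then stays at a
    small floor so layers are still dropped occasionally."""
    if batch_count <= warmup_begin:
        prob = initial_prob
    elif batch_count >= warmup_end:
        prob = final_prob
    else:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        prob = initial_prob + frac * (final_prob - initial_prob)
    return {i for i in range(num_layers) if rng.random() < prob}

# E.g. a 4-layer stack with warmup window 666.7-1333.3, long past warmup:
print(pick_layers_to_drop(4, batch_count=11589.0,
                          warmup_begin=666.7, warmup_end=1333.3))
```
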
+2023-04-26 13:06:11,126 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.469e+02 1.955e+02 2.272e+02 2.719e+02 4.720e+02, threshold=4.545e+02, percent-clipped=1.0 +2023-04-26 13:06:17,276 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11530.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:06:18,549 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11532.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:06:24,401 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11541.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 13:06:27,544 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-26 13:06:34,900 INFO [finetune.py:976] (0/7) Epoch 3, batch 100, loss[loss=0.2395, simple_loss=0.2888, pruned_loss=0.0951, over 4910.00 frames. ], tot_loss[loss=0.2445, simple_loss=0.2964, pruned_loss=0.09631, over 382438.15 frames. ], batch size: 46, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:06:55,969 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=11589.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 13:06:58,891 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11593.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:07:08,541 INFO [finetune.py:976] (0/7) Epoch 3, batch 150, loss[loss=0.2176, simple_loss=0.2462, pruned_loss=0.09451, over 4261.00 frames. ], tot_loss[loss=0.241, simple_loss=0.2919, pruned_loss=0.09506, over 511023.10 frames. ], batch size: 18, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:07:18,014 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.434e+02 1.865e+02 2.222e+02 2.672e+02 4.139e+02, threshold=4.445e+02, percent-clipped=0.0 +2023-04-26 13:07:27,296 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11635.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:07:42,006 INFO [finetune.py:976] (0/7) Epoch 3, batch 200, loss[loss=0.2784, simple_loss=0.3351, pruned_loss=0.1109, over 4829.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.2885, pruned_loss=0.09285, over 608912.39 frames. ], batch size: 40, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:08:07,524 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1099, 1.4674, 1.3734, 1.7750, 1.5665, 1.7657, 1.4108, 3.0824], + device='cuda:0'), covar=tensor([0.0726, 0.0825, 0.0842, 0.1334, 0.0694, 0.0562, 0.0799, 0.0203], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0041, 0.0042, 0.0047, 0.0042, 0.0041, 0.0041, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 13:08:29,870 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11696.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:08:31,704 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11699.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:08:42,574 INFO [finetune.py:976] (0/7) Epoch 3, batch 250, loss[loss=0.2513, simple_loss=0.3084, pruned_loss=0.09711, over 4819.00 frames. ], tot_loss[loss=0.2388, simple_loss=0.2916, pruned_loss=0.09297, over 684537.79 frames.
], batch size: 38, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:09:01,055 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0429, 1.4444, 1.3414, 1.8142, 2.0894, 1.8344, 1.6801, 1.4982], + device='cuda:0'), covar=tensor([0.2225, 0.2281, 0.2392, 0.1787, 0.1398, 0.2110, 0.2604, 0.2189], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0338, 0.0346, 0.0314, 0.0349, 0.0363, 0.0321, 0.0354], + device='cuda:0'), out_proj_covar=tensor([7.0151e-05, 7.2587e-05, 7.4874e-05, 6.5984e-05, 7.4397e-05, 7.9352e-05, + 6.9826e-05, 7.6463e-05], device='cuda:0') +2023-04-26 13:09:02,119 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.384e+02 2.012e+02 2.336e+02 2.910e+02 4.662e+02, threshold=4.672e+02, percent-clipped=2.0 +2023-04-26 13:09:36,496 INFO [finetune.py:976] (0/7) Epoch 3, batch 300, loss[loss=0.2376, simple_loss=0.3027, pruned_loss=0.08624, over 4904.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.2956, pruned_loss=0.09425, over 743976.93 frames. ], batch size: 37, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:09:40,539 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11760.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:10:10,610 INFO [finetune.py:976] (0/7) Epoch 3, batch 350, loss[loss=0.2718, simple_loss=0.3187, pruned_loss=0.1124, over 4856.00 frames. ], tot_loss[loss=0.246, simple_loss=0.2995, pruned_loss=0.09623, over 792182.99 frames. ], batch size: 44, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:10:18,379 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11816.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:10:21,174 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 2.028e+02 2.307e+02 2.756e+02 7.160e+02, threshold=4.615e+02, percent-clipped=2.0 +2023-04-26 13:10:28,337 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=11830.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:10:38,185 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11846.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:10:50,157 INFO [finetune.py:976] (0/7) Epoch 3, batch 400, loss[loss=0.2472, simple_loss=0.3103, pruned_loss=0.09202, over 4873.00 frames. ], tot_loss[loss=0.2461, simple_loss=0.2998, pruned_loss=0.09623, over 828393.51 frames. ], batch size: 32, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:11:20,880 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11877.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:11:21,399 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=11878.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:11:33,325 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11888.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:11:44,732 INFO [finetune.py:976] (0/7) Epoch 3, batch 450, loss[loss=0.19, simple_loss=0.2496, pruned_loss=0.06523, over 4814.00 frames. ], tot_loss[loss=0.2449, simple_loss=0.2982, pruned_loss=0.09575, over 857086.37 frames. 
], batch size: 40, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:11:45,488 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11907.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:11:54,740 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.957e+02 2.306e+02 2.695e+02 4.680e+02, threshold=4.613e+02, percent-clipped=1.0 +2023-04-26 13:11:54,871 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11920.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:12:09,717 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5075, 1.6487, 1.6105, 1.7833, 1.6826, 1.9169, 1.6699, 1.6848], + device='cuda:0'), covar=tensor([1.5863, 3.1100, 2.4097, 2.0036, 2.2035, 3.5031, 3.0678, 2.6681], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0394, 0.0312, 0.0318, 0.0345, 0.0391, 0.0379, 0.0341], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:12:13,338 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11948.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:12:18,146 INFO [finetune.py:976] (0/7) Epoch 3, batch 500, loss[loss=0.2279, simple_loss=0.2762, pruned_loss=0.08987, over 4836.00 frames. ], tot_loss[loss=0.2433, simple_loss=0.2959, pruned_loss=0.09538, over 879418.64 frames. ], batch size: 30, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:12:24,805 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4377, 1.1687, 1.4920, 1.6666, 1.5286, 1.3557, 1.3881, 1.4109], + device='cuda:0'), covar=tensor([2.6377, 3.8854, 4.2320, 4.3826, 2.8614, 4.7142, 4.7731, 3.4620], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0503, 0.0597, 0.0598, 0.0479, 0.0518, 0.0528, 0.0541], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:12:36,366 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=11981.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:12:38,146 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=11984.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:12:42,323 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=11991.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:12:47,790 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-12000.pt +2023-04-26 13:12:50,874 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8050, 1.4700, 2.0090, 1.9195, 1.5753, 1.3730, 1.7019, 0.9878], + device='cuda:0'), covar=tensor([0.0778, 0.1248, 0.0593, 0.1053, 0.1105, 0.1599, 0.0804, 0.1436], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0079, 0.0075, 0.0072, 0.0085, 0.0096, 0.0088, 0.0080], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 13:12:52,573 INFO [finetune.py:976] (0/7) Epoch 3, batch 550, loss[loss=0.2425, simple_loss=0.2905, pruned_loss=0.09728, over 4909.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2924, pruned_loss=0.09401, over 897844.22 frames. 
], batch size: 46, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:12:54,561 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12009.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:13:02,613 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.347e+02 1.896e+02 2.113e+02 2.653e+02 4.840e+02, threshold=4.226e+02, percent-clipped=1.0 +2023-04-26 13:13:19,312 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12045.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:13:30,920 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12055.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:13:31,437 INFO [finetune.py:976] (0/7) Epoch 3, batch 600, loss[loss=0.2657, simple_loss=0.3157, pruned_loss=0.1079, over 4894.00 frames. ], tot_loss[loss=0.2413, simple_loss=0.2937, pruned_loss=0.09449, over 910920.42 frames. ], batch size: 35, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:14:01,458 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12076.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:14:27,388 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-26 13:14:38,095 INFO [finetune.py:976] (0/7) Epoch 3, batch 650, loss[loss=0.2678, simple_loss=0.331, pruned_loss=0.1023, over 4842.00 frames. ], tot_loss[loss=0.2437, simple_loss=0.2967, pruned_loss=0.09531, over 920953.53 frames. ], batch size: 49, lr: 3.99e-03, grad_scale: 32.0 +2023-04-26 13:14:55,287 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.228e+02 1.956e+02 2.261e+02 2.773e+02 5.813e+02, threshold=4.521e+02, percent-clipped=3.0 +2023-04-26 13:15:18,804 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12137.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:15:30,238 INFO [finetune.py:976] (0/7) Epoch 3, batch 700, loss[loss=0.2331, simple_loss=0.2908, pruned_loss=0.08769, over 4805.00 frames. ], tot_loss[loss=0.2456, simple_loss=0.2984, pruned_loss=0.09636, over 928234.96 frames. ], batch size: 25, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 13:15:40,477 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12172.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:15:52,212 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12188.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:16:00,818 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12202.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:16:03,161 INFO [finetune.py:976] (0/7) Epoch 3, batch 750, loss[loss=0.2338, simple_loss=0.298, pruned_loss=0.08484, over 4828.00 frames. ], tot_loss[loss=0.2465, simple_loss=0.2996, pruned_loss=0.09664, over 933776.70 frames. ], batch size: 30, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 13:16:11,580 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 2.083e+02 2.562e+02 2.927e+02 7.910e+02, threshold=5.125e+02, percent-clipped=5.0 +2023-04-26 13:16:23,274 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=12236.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:16:42,544 INFO [finetune.py:976] (0/7) Epoch 3, batch 800, loss[loss=0.1897, simple_loss=0.2571, pruned_loss=0.06114, over 4845.00 frames. ], tot_loss[loss=0.2462, simple_loss=0.2996, pruned_loss=0.09643, over 939017.04 frames. 
], batch size: 47, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 13:17:00,570 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12276.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:17:11,636 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12291.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:17:15,754 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3703, 2.0880, 1.7538, 1.8461, 2.1965, 1.7044, 2.4574, 1.5120], + device='cuda:0'), covar=tensor([0.3724, 0.1478, 0.4184, 0.2812, 0.1693, 0.2394, 0.1769, 0.4224], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0355, 0.0438, 0.0373, 0.0405, 0.0381, 0.0401, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:17:19,891 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12304.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:17:21,023 INFO [finetune.py:976] (0/7) Epoch 3, batch 850, loss[loss=0.2446, simple_loss=0.2871, pruned_loss=0.101, over 4714.00 frames. ], tot_loss[loss=0.2446, simple_loss=0.297, pruned_loss=0.09608, over 940882.55 frames. ], batch size: 23, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 13:17:29,563 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.293e+02 1.971e+02 2.371e+02 2.612e+02 4.896e+02, threshold=4.741e+02, percent-clipped=0.0 +2023-04-26 13:17:30,841 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6785, 1.5045, 4.4499, 4.1819, 3.9149, 4.2088, 4.0739, 3.9656], + device='cuda:0'), covar=tensor([0.6406, 0.5445, 0.0954, 0.1498, 0.1049, 0.1373, 0.1516, 0.1368], + device='cuda:0'), in_proj_covar=tensor([0.0328, 0.0309, 0.0437, 0.0444, 0.0374, 0.0425, 0.0337, 0.0393], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 13:17:43,090 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=12339.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:17:43,715 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12340.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:17:48,345 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3311, 1.2777, 3.7847, 3.5205, 3.3550, 3.5531, 3.5338, 3.3273], + device='cuda:0'), covar=tensor([0.6906, 0.5536, 0.1117, 0.1635, 0.1222, 0.1902, 0.2149, 0.1697], + device='cuda:0'), in_proj_covar=tensor([0.0329, 0.0310, 0.0439, 0.0446, 0.0376, 0.0426, 0.0339, 0.0395], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 13:17:54,298 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12355.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:17:54,806 INFO [finetune.py:976] (0/7) Epoch 3, batch 900, loss[loss=0.2099, simple_loss=0.2631, pruned_loss=0.07837, over 4803.00 frames. ], tot_loss[loss=0.2412, simple_loss=0.2937, pruned_loss=0.09436, over 945872.64 frames. ], batch size: 25, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 13:18:26,968 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=12403.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:18:28,771 INFO [finetune.py:976] (0/7) Epoch 3, batch 950, loss[loss=0.2415, simple_loss=0.2774, pruned_loss=0.1028, over 4368.00 frames. 
], tot_loss[loss=0.2399, simple_loss=0.2923, pruned_loss=0.09376, over 947946.57 frames. ], batch size: 19, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:18:37,304 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.210e+02 1.930e+02 2.195e+02 2.798e+02 5.251e+02, threshold=4.389e+02, percent-clipped=2.0 +2023-04-26 13:18:50,723 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12432.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:19:13,293 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12446.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:19:20,776 INFO [finetune.py:976] (0/7) Epoch 3, batch 1000, loss[loss=0.2413, simple_loss=0.2855, pruned_loss=0.09852, over 4686.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2949, pruned_loss=0.09485, over 948145.73 frames. ], batch size: 23, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:19:26,424 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1414, 0.6886, 0.9305, 0.6777, 1.2494, 0.9907, 0.8041, 1.0150], + device='cuda:0'), covar=tensor([0.2022, 0.1874, 0.2312, 0.2068, 0.1203, 0.1927, 0.2332, 0.2581], + device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0338, 0.0348, 0.0314, 0.0350, 0.0364, 0.0321, 0.0354], + device='cuda:0'), out_proj_covar=tensor([7.0317e-05, 7.2697e-05, 7.5427e-05, 6.5939e-05, 7.4446e-05, 7.9504e-05, + 6.9797e-05, 7.6498e-05], device='cuda:0') +2023-04-26 13:19:30,617 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12472.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:19:57,863 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12502.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:20:06,942 INFO [finetune.py:976] (0/7) Epoch 3, batch 1050, loss[loss=0.2334, simple_loss=0.2658, pruned_loss=0.1005, over 4411.00 frames. ], tot_loss[loss=0.2436, simple_loss=0.2966, pruned_loss=0.09533, over 949118.02 frames. ], batch size: 19, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:20:07,654 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12507.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:20:25,976 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.380e+02 2.009e+02 2.317e+02 2.704e+02 5.500e+02, threshold=4.634e+02, percent-clipped=2.0 +2023-04-26 13:20:26,053 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=12520.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:20:39,052 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-26 13:21:02,672 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=12550.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:21:12,257 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-26 13:21:12,628 INFO [finetune.py:976] (0/7) Epoch 3, batch 1100, loss[loss=0.2023, simple_loss=0.2692, pruned_loss=0.06772, over 4758.00 frames. ], tot_loss[loss=0.2444, simple_loss=0.2985, pruned_loss=0.09521, over 951181.86 frames. 
], batch size: 54, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:21:15,537 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3262, 1.2321, 3.8213, 3.5184, 3.3960, 3.6326, 3.6483, 3.3835], + device='cuda:0'), covar=tensor([0.6392, 0.5572, 0.1018, 0.1748, 0.1176, 0.1488, 0.1703, 0.1495], + device='cuda:0'), in_proj_covar=tensor([0.0326, 0.0310, 0.0438, 0.0444, 0.0375, 0.0425, 0.0335, 0.0394], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 13:21:18,172 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-04-26 13:21:24,120 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-26 13:21:25,859 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12576.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:21:35,447 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-26 13:21:44,423 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12604.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:21:46,054 INFO [finetune.py:976] (0/7) Epoch 3, batch 1150, loss[loss=0.2248, simple_loss=0.2889, pruned_loss=0.08037, over 4902.00 frames. ], tot_loss[loss=0.2453, simple_loss=0.2996, pruned_loss=0.09551, over 952418.24 frames. ], batch size: 36, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:21:54,478 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9013, 1.2260, 1.7976, 2.0035, 1.5606, 1.2363, 1.0675, 1.4104], + device='cuda:0'), covar=tensor([0.5606, 0.6852, 0.3006, 0.5446, 0.6187, 0.5076, 0.8722, 0.6049], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0274, 0.0223, 0.0343, 0.0232, 0.0235, 0.0264, 0.0211], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:21:55,572 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.367e+02 1.845e+02 2.160e+02 2.613e+02 5.486e+02, threshold=4.320e+02, percent-clipped=1.0 +2023-04-26 13:21:58,096 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=12624.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:22:19,249 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12640.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:22:34,207 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=12652.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:22:36,635 INFO [finetune.py:976] (0/7) Epoch 3, batch 1200, loss[loss=0.259, simple_loss=0.3048, pruned_loss=0.1066, over 4856.00 frames. ], tot_loss[loss=0.2438, simple_loss=0.2981, pruned_loss=0.09475, over 953528.30 frames. 
], batch size: 31, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:22:55,174 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7884, 2.4343, 1.6496, 1.5312, 1.2885, 1.3660, 1.7597, 1.2416], + device='cuda:0'), covar=tensor([0.2244, 0.1847, 0.2434, 0.2871, 0.3519, 0.2644, 0.1765, 0.2864], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0223, 0.0190, 0.0215, 0.0229, 0.0194, 0.0185, 0.0205], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 13:22:57,547 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=12688.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:23:04,192 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7137, 2.5372, 3.0737, 3.1522, 2.7148, 2.4370, 1.9935, 2.6704], + device='cuda:0'), covar=tensor([0.1047, 0.1064, 0.0482, 0.0729, 0.0794, 0.1098, 0.1172, 0.0713], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0212, 0.0190, 0.0185, 0.0184, 0.0201, 0.0177, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:23:09,932 INFO [finetune.py:976] (0/7) Epoch 3, batch 1250, loss[loss=0.2448, simple_loss=0.2958, pruned_loss=0.09688, over 4819.00 frames. ], tot_loss[loss=0.2408, simple_loss=0.2951, pruned_loss=0.09326, over 954430.01 frames. ], batch size: 25, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:23:19,406 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.393e+02 1.929e+02 2.277e+02 2.741e+02 5.638e+02, threshold=4.554e+02, percent-clipped=3.0 +2023-04-26 13:23:25,061 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-26 13:23:26,778 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=12732.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:23:42,758 INFO [finetune.py:976] (0/7) Epoch 3, batch 1300, loss[loss=0.1829, simple_loss=0.2479, pruned_loss=0.05894, over 4763.00 frames. ], tot_loss[loss=0.2382, simple_loss=0.2921, pruned_loss=0.0921, over 954241.77 frames. ], batch size: 27, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:23:47,608 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=12763.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:24:09,813 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=12780.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:24:36,000 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=12802.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:24:39,384 INFO [finetune.py:976] (0/7) Epoch 3, batch 1350, loss[loss=0.2051, simple_loss=0.2686, pruned_loss=0.0708, over 4770.00 frames. ], tot_loss[loss=0.2391, simple_loss=0.2927, pruned_loss=0.09274, over 953618.60 frames. ], batch size: 28, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:24:48,916 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.232e+02 1.880e+02 2.141e+02 2.633e+02 4.297e+02, threshold=4.283e+02, percent-clipped=1.0 +2023-04-26 13:24:51,942 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=12824.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 13:25:12,234 INFO [finetune.py:976] (0/7) Epoch 3, batch 1400, loss[loss=0.2226, simple_loss=0.2777, pruned_loss=0.08375, over 4881.00 frames. ], tot_loss[loss=0.2423, simple_loss=0.2964, pruned_loss=0.09415, over 955775.06 frames. 
], batch size: 32, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:25:27,706 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-04-26 13:25:56,156 INFO [finetune.py:976] (0/7) Epoch 3, batch 1450, loss[loss=0.2692, simple_loss=0.3386, pruned_loss=0.09993, over 4867.00 frames. ], tot_loss[loss=0.2454, simple_loss=0.2994, pruned_loss=0.09571, over 955910.34 frames. ], batch size: 34, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:26:17,005 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.925e+02 2.434e+02 2.951e+02 7.906e+02, threshold=4.868e+02, percent-clipped=4.0 +2023-04-26 13:27:02,921 INFO [finetune.py:976] (0/7) Epoch 3, batch 1500, loss[loss=0.2823, simple_loss=0.3417, pruned_loss=0.1114, over 4846.00 frames. ], tot_loss[loss=0.2463, simple_loss=0.3002, pruned_loss=0.09622, over 955518.91 frames. ], batch size: 44, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:27:17,297 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-26 13:27:26,245 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-26 13:27:40,484 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6793, 2.5861, 3.0682, 3.1372, 2.8288, 2.4912, 2.0495, 2.6044], + device='cuda:0'), covar=tensor([0.1202, 0.1077, 0.0596, 0.0777, 0.0741, 0.1274, 0.1267, 0.0771], + device='cuda:0'), in_proj_covar=tensor([0.0211, 0.0211, 0.0190, 0.0185, 0.0184, 0.0201, 0.0177, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:27:41,577 INFO [finetune.py:976] (0/7) Epoch 3, batch 1550, loss[loss=0.2354, simple_loss=0.2918, pruned_loss=0.08953, over 4748.00 frames. ], tot_loss[loss=0.2441, simple_loss=0.2986, pruned_loss=0.09475, over 956523.92 frames. ], batch size: 59, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:28:02,730 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.270e+02 2.021e+02 2.296e+02 2.726e+02 4.661e+02, threshold=4.592e+02, percent-clipped=0.0 +2023-04-26 13:28:33,838 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-26 13:28:47,302 INFO [finetune.py:976] (0/7) Epoch 3, batch 1600, loss[loss=0.2352, simple_loss=0.2909, pruned_loss=0.08976, over 4849.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.297, pruned_loss=0.09495, over 956782.25 frames. ], batch size: 44, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:28:47,995 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2511, 1.3952, 3.8575, 3.5695, 3.4239, 3.5936, 3.6816, 3.4397], + device='cuda:0'), covar=tensor([0.6892, 0.5215, 0.1091, 0.1890, 0.1168, 0.1799, 0.1516, 0.1409], + device='cuda:0'), in_proj_covar=tensor([0.0326, 0.0310, 0.0439, 0.0443, 0.0374, 0.0425, 0.0337, 0.0391], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:29:10,867 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. 
limit=5.0 +2023-04-26 13:29:11,322 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5513, 1.9151, 1.5838, 1.7490, 1.6518, 1.8766, 1.6447, 1.6460], + device='cuda:0'), covar=tensor([1.5058, 2.6461, 2.3480, 1.9036, 2.1929, 3.1743, 3.0421, 2.6120], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0399, 0.0315, 0.0322, 0.0349, 0.0401, 0.0384, 0.0345], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:29:22,128 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-26 13:29:23,814 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13102.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:29:26,216 INFO [finetune.py:976] (0/7) Epoch 3, batch 1650, loss[loss=0.208, simple_loss=0.2576, pruned_loss=0.07915, over 4770.00 frames. ], tot_loss[loss=0.2402, simple_loss=0.2936, pruned_loss=0.09345, over 957079.08 frames. ], batch size: 26, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:29:30,577 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2727, 1.5862, 1.2657, 1.5192, 1.3733, 1.2030, 1.3282, 1.1248], + device='cuda:0'), covar=tensor([0.2172, 0.1558, 0.1321, 0.1534, 0.3451, 0.1690, 0.2022, 0.2695], + device='cuda:0'), in_proj_covar=tensor([0.0320, 0.0341, 0.0249, 0.0312, 0.0319, 0.0292, 0.0281, 0.0304], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:29:34,718 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=13119.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 13:29:35,230 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.899e+02 2.180e+02 2.609e+02 5.027e+02, threshold=4.359e+02, percent-clipped=1.0 +2023-04-26 13:29:44,649 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4694, 3.8435, 0.8564, 2.1226, 2.1828, 2.5870, 2.3841, 1.0647], + device='cuda:0'), covar=tensor([0.1371, 0.0950, 0.2146, 0.1326, 0.1006, 0.1193, 0.1477, 0.2319], + device='cuda:0'), in_proj_covar=tensor([0.0125, 0.0271, 0.0152, 0.0132, 0.0143, 0.0166, 0.0129, 0.0134], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 13:29:53,535 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6636, 3.5839, 2.6828, 4.1476, 3.5462, 3.6222, 1.5537, 3.5412], + device='cuda:0'), covar=tensor([0.1792, 0.1132, 0.3142, 0.1817, 0.3072, 0.1849, 0.5822, 0.2350], + device='cuda:0'), in_proj_covar=tensor([0.0255, 0.0229, 0.0271, 0.0321, 0.0317, 0.0266, 0.0282, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:29:55,915 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=13150.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:29:55,967 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6467, 1.4401, 0.7775, 1.2872, 1.7545, 1.5135, 1.4104, 1.4493], + device='cuda:0'), covar=tensor([0.0575, 0.0481, 0.0484, 0.0627, 0.0325, 0.0601, 0.0618, 0.0711], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0048, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 13:29:59,487 INFO [finetune.py:976] (0/7) Epoch 3, 
batch 1700, loss[loss=0.2524, simple_loss=0.3062, pruned_loss=0.09932, over 4761.00 frames. ], tot_loss[loss=0.2397, simple_loss=0.2923, pruned_loss=0.09352, over 958606.13 frames. ], batch size: 26, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:30:00,199 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9722, 2.6993, 2.1877, 2.4341, 1.8339, 2.0397, 2.2087, 1.7147], + device='cuda:0'), covar=tensor([0.2465, 0.1412, 0.0971, 0.1576, 0.3129, 0.1572, 0.1957, 0.2996], + device='cuda:0'), in_proj_covar=tensor([0.0320, 0.0341, 0.0249, 0.0312, 0.0319, 0.0292, 0.0281, 0.0304], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:30:33,506 INFO [finetune.py:976] (0/7) Epoch 3, batch 1750, loss[loss=0.2424, simple_loss=0.2884, pruned_loss=0.09819, over 4871.00 frames. ], tot_loss[loss=0.2421, simple_loss=0.2946, pruned_loss=0.09477, over 955899.54 frames. ], batch size: 31, lr: 3.98e-03, grad_scale: 64.0 +2023-04-26 13:30:43,128 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.228e+02 1.834e+02 2.276e+02 2.863e+02 4.766e+02, threshold=4.553e+02, percent-clipped=3.0 +2023-04-26 13:31:07,405 INFO [finetune.py:976] (0/7) Epoch 3, batch 1800, loss[loss=0.2412, simple_loss=0.2999, pruned_loss=0.09129, over 4141.00 frames. ], tot_loss[loss=0.244, simple_loss=0.2978, pruned_loss=0.09506, over 955160.30 frames. ], batch size: 65, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 13:31:14,336 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-26 13:31:47,120 INFO [finetune.py:976] (0/7) Epoch 3, batch 1850, loss[loss=0.2764, simple_loss=0.3257, pruned_loss=0.1136, over 4808.00 frames. ], tot_loss[loss=0.2458, simple_loss=0.2998, pruned_loss=0.09588, over 955753.73 frames. ], batch size: 40, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:31:56,813 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.349e+02 1.904e+02 2.289e+02 2.801e+02 9.204e+02, threshold=4.579e+02, percent-clipped=2.0 +2023-04-26 13:32:30,329 INFO [finetune.py:976] (0/7) Epoch 3, batch 1900, loss[loss=0.2433, simple_loss=0.3211, pruned_loss=0.0828, over 4818.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.299, pruned_loss=0.09476, over 955465.26 frames. ], batch size: 39, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:33:26,631 INFO [finetune.py:976] (0/7) Epoch 3, batch 1950, loss[loss=0.2146, simple_loss=0.2615, pruned_loss=0.08386, over 3914.00 frames. ], tot_loss[loss=0.2405, simple_loss=0.2954, pruned_loss=0.09282, over 954755.03 frames. 
], batch size: 17, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:33:28,623 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2408, 1.7027, 1.5427, 2.0022, 1.7771, 2.1833, 1.5757, 3.8488], + device='cuda:0'), covar=tensor([0.0785, 0.0797, 0.0892, 0.1245, 0.0723, 0.0543, 0.0789, 0.0162], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0041, 0.0042, 0.0046, 0.0042, 0.0041, 0.0041, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 13:33:39,963 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=13419.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:33:41,755 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.766e+02 2.138e+02 2.602e+02 4.545e+02, threshold=4.277e+02, percent-clipped=0.0 +2023-04-26 13:34:17,442 INFO [finetune.py:976] (0/7) Epoch 3, batch 2000, loss[loss=0.2056, simple_loss=0.2549, pruned_loss=0.07821, over 4028.00 frames. ], tot_loss[loss=0.2378, simple_loss=0.2918, pruned_loss=0.09185, over 955597.21 frames. ], batch size: 17, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:34:24,603 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=13467.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:34:25,239 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1830, 1.5973, 1.3958, 1.8562, 1.7016, 2.1504, 1.4623, 3.4279], + device='cuda:0'), covar=tensor([0.0739, 0.0760, 0.0802, 0.1211, 0.0637, 0.0512, 0.0751, 0.0182], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0041, 0.0042, 0.0046, 0.0042, 0.0041, 0.0041, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 13:34:30,193 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-26 13:34:51,089 INFO [finetune.py:976] (0/7) Epoch 3, batch 2050, loss[loss=0.1587, simple_loss=0.2329, pruned_loss=0.04224, over 4788.00 frames. ], tot_loss[loss=0.2349, simple_loss=0.2884, pruned_loss=0.09066, over 955387.92 frames. ], batch size: 29, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:35:01,247 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.294e+02 1.882e+02 2.225e+02 2.584e+02 5.744e+02, threshold=4.451e+02, percent-clipped=3.0 +2023-04-26 13:35:08,669 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0003, 2.0371, 1.7950, 1.6847, 2.1733, 1.7276, 2.7400, 1.5423], + device='cuda:0'), covar=tensor([0.4573, 0.1982, 0.5414, 0.3676, 0.2090, 0.3003, 0.1504, 0.5081], + device='cuda:0'), in_proj_covar=tensor([0.0355, 0.0357, 0.0441, 0.0377, 0.0408, 0.0385, 0.0404, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:35:19,611 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9960, 1.0579, 3.1884, 2.7730, 2.9439, 2.9902, 3.1086, 2.7184], + device='cuda:0'), covar=tensor([0.9619, 0.7499, 0.2459, 0.4196, 0.2578, 0.4222, 0.3025, 0.3678], + device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0312, 0.0438, 0.0446, 0.0373, 0.0426, 0.0337, 0.0394], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 13:35:24,226 INFO [finetune.py:976] (0/7) Epoch 3, batch 2100, loss[loss=0.2421, simple_loss=0.2996, pruned_loss=0.09236, over 4910.00 frames. 
], tot_loss[loss=0.2352, simple_loss=0.2884, pruned_loss=0.09104, over 954504.69 frames. ], batch size: 36, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:35:57,672 INFO [finetune.py:976] (0/7) Epoch 3, batch 2150, loss[loss=0.2556, simple_loss=0.3164, pruned_loss=0.09741, over 4840.00 frames. ], tot_loss[loss=0.2399, simple_loss=0.2932, pruned_loss=0.09331, over 956098.92 frames. ], batch size: 47, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:36:07,903 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.340e+02 1.924e+02 2.294e+02 2.856e+02 4.572e+02, threshold=4.587e+02, percent-clipped=1.0 +2023-04-26 13:36:14,105 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7661, 1.5218, 2.2051, 2.1308, 1.6004, 1.2362, 1.8359, 1.2107], + device='cuda:0'), covar=tensor([0.1090, 0.1124, 0.0671, 0.1062, 0.1275, 0.1795, 0.1019, 0.1407], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0079, 0.0077, 0.0071, 0.0084, 0.0097, 0.0088, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 13:36:30,853 INFO [finetune.py:976] (0/7) Epoch 3, batch 2200, loss[loss=0.2357, simple_loss=0.3014, pruned_loss=0.08496, over 4811.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2951, pruned_loss=0.09379, over 956770.22 frames. ], batch size: 39, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:37:21,213 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8959, 1.4077, 1.7654, 2.1916, 1.7048, 1.3762, 1.2384, 1.5851], + device='cuda:0'), covar=tensor([0.4438, 0.5562, 0.2388, 0.3938, 0.5364, 0.4044, 0.7397, 0.5263], + device='cuda:0'), in_proj_covar=tensor([0.0270, 0.0273, 0.0224, 0.0342, 0.0231, 0.0235, 0.0263, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:37:21,693 INFO [finetune.py:976] (0/7) Epoch 3, batch 2250, loss[loss=0.2225, simple_loss=0.2919, pruned_loss=0.07649, over 4254.00 frames. ], tot_loss[loss=0.2443, simple_loss=0.2978, pruned_loss=0.09537, over 954409.21 frames. ], batch size: 65, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:37:31,818 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.241e+02 1.916e+02 2.386e+02 2.867e+02 7.645e+02, threshold=4.772e+02, percent-clipped=4.0 +2023-04-26 13:37:43,769 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=13737.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:37:54,723 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-26 13:38:02,146 INFO [finetune.py:976] (0/7) Epoch 3, batch 2300, loss[loss=0.2385, simple_loss=0.2993, pruned_loss=0.0889, over 4887.00 frames. ], tot_loss[loss=0.2434, simple_loss=0.2973, pruned_loss=0.09475, over 955557.19 frames. ], batch size: 43, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:38:11,230 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. 
limit=5.0 +2023-04-26 13:38:11,684 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5584, 1.3773, 0.6388, 1.1977, 1.4580, 1.3998, 1.2868, 1.3139], + device='cuda:0'), covar=tensor([0.0576, 0.0429, 0.0490, 0.0627, 0.0321, 0.0606, 0.0557, 0.0670], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 13:38:44,316 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2400, 2.8960, 0.9800, 1.5218, 1.6211, 2.0268, 1.9563, 1.1442], + device='cuda:0'), covar=tensor([0.1670, 0.1916, 0.2247, 0.1719, 0.1252, 0.1385, 0.1586, 0.1808], + device='cuda:0'), in_proj_covar=tensor([0.0125, 0.0271, 0.0151, 0.0132, 0.0143, 0.0166, 0.0129, 0.0133], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 13:38:57,792 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=13798.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 13:39:09,163 INFO [finetune.py:976] (0/7) Epoch 3, batch 2350, loss[loss=0.2505, simple_loss=0.3026, pruned_loss=0.09914, over 4923.00 frames. ], tot_loss[loss=0.2414, simple_loss=0.2949, pruned_loss=0.09399, over 958121.06 frames. ], batch size: 38, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:39:37,986 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.317e+02 2.025e+02 2.480e+02 2.975e+02 5.089e+02, threshold=4.960e+02, percent-clipped=2.0 +2023-04-26 13:40:01,096 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4487, 3.0392, 1.0247, 1.7045, 1.8175, 2.2166, 1.9921, 1.0609], + device='cuda:0'), covar=tensor([0.1299, 0.1049, 0.1891, 0.1285, 0.0997, 0.1013, 0.1357, 0.1531], + device='cuda:0'), in_proj_covar=tensor([0.0124, 0.0270, 0.0150, 0.0132, 0.0142, 0.0165, 0.0128, 0.0132], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 13:40:20,278 INFO [finetune.py:976] (0/7) Epoch 3, batch 2400, loss[loss=0.2035, simple_loss=0.2647, pruned_loss=0.07116, over 4761.00 frames. ], tot_loss[loss=0.2393, simple_loss=0.2924, pruned_loss=0.09308, over 958101.06 frames. ], batch size: 26, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:40:37,908 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. limit=5.0 +2023-04-26 13:40:51,160 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1817, 1.8599, 2.5730, 2.6302, 1.8596, 1.5542, 2.1842, 1.3763], + device='cuda:0'), covar=tensor([0.0881, 0.1189, 0.0594, 0.0825, 0.1064, 0.1637, 0.0970, 0.1443], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0079, 0.0076, 0.0071, 0.0084, 0.0097, 0.0088, 0.0081], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 13:40:57,682 INFO [finetune.py:976] (0/7) Epoch 3, batch 2450, loss[loss=0.2256, simple_loss=0.2828, pruned_loss=0.08414, over 4838.00 frames. ], tot_loss[loss=0.2362, simple_loss=0.2892, pruned_loss=0.09158, over 957626.63 frames. 
], batch size: 47, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:41:07,648 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0274, 2.1391, 1.7561, 1.7878, 2.2317, 1.6540, 2.7987, 1.5686], + device='cuda:0'), covar=tensor([0.4858, 0.1909, 0.5758, 0.3385, 0.2001, 0.3241, 0.1163, 0.4847], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0355, 0.0438, 0.0373, 0.0405, 0.0383, 0.0400, 0.0418], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:41:09,355 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.373e+02 2.035e+02 2.305e+02 2.759e+02 5.668e+02, threshold=4.609e+02, percent-clipped=2.0 +2023-04-26 13:41:31,085 INFO [finetune.py:976] (0/7) Epoch 3, batch 2500, loss[loss=0.2615, simple_loss=0.3286, pruned_loss=0.09717, over 4824.00 frames. ], tot_loss[loss=0.2367, simple_loss=0.2897, pruned_loss=0.09185, over 955199.52 frames. ], batch size: 40, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:41:31,199 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5200, 1.3883, 1.5825, 1.8327, 1.9384, 1.4191, 1.2098, 1.6059], + device='cuda:0'), covar=tensor([0.1100, 0.1362, 0.0856, 0.0726, 0.0675, 0.1135, 0.1135, 0.0777], + device='cuda:0'), in_proj_covar=tensor([0.0212, 0.0212, 0.0191, 0.0185, 0.0185, 0.0202, 0.0177, 0.0198], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:41:44,351 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0 +2023-04-26 13:41:58,001 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0895, 2.0187, 1.7776, 1.7638, 2.1788, 1.6429, 2.7823, 1.6083], + device='cuda:0'), covar=tensor([0.4416, 0.2041, 0.4748, 0.3740, 0.2223, 0.3109, 0.1328, 0.4615], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0356, 0.0437, 0.0373, 0.0406, 0.0383, 0.0399, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:42:01,117 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-14000.pt +2023-04-26 13:42:05,882 INFO [finetune.py:976] (0/7) Epoch 3, batch 2550, loss[loss=0.2331, simple_loss=0.2922, pruned_loss=0.08702, over 4924.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.2933, pruned_loss=0.09289, over 956268.50 frames. ], batch size: 33, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:42:27,192 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0696, 1.3555, 1.8092, 2.5114, 1.8397, 1.4433, 1.2955, 1.7792], + device='cuda:0'), covar=tensor([0.4937, 0.5790, 0.2697, 0.5004, 0.5290, 0.3847, 0.7116, 0.4490], + device='cuda:0'), in_proj_covar=tensor([0.0269, 0.0272, 0.0223, 0.0342, 0.0230, 0.0234, 0.0261, 0.0208], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:42:28,112 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.240e+02 1.992e+02 2.434e+02 2.841e+02 4.921e+02, threshold=4.868e+02, percent-clipped=1.0 +2023-04-26 13:43:13,249 INFO [finetune.py:976] (0/7) Epoch 3, batch 2600, loss[loss=0.2252, simple_loss=0.2784, pruned_loss=0.086, over 4791.00 frames. ], tot_loss[loss=0.24, simple_loss=0.2941, pruned_loss=0.09288, over 956488.73 frames. 
], batch size: 25, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:43:34,080 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-26 13:43:37,607 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14072.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:44:06,886 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14093.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 13:44:16,132 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-26 13:44:19,011 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14103.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:44:25,133 INFO [finetune.py:976] (0/7) Epoch 3, batch 2650, loss[loss=0.2371, simple_loss=0.3004, pruned_loss=0.0869, over 4746.00 frames. ], tot_loss[loss=0.24, simple_loss=0.2946, pruned_loss=0.09265, over 957631.72 frames. ], batch size: 59, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:44:29,000 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.48 vs. limit=5.0 +2023-04-26 13:44:37,674 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6302, 1.2813, 1.2908, 1.3692, 1.9916, 1.6391, 1.3218, 1.2533], + device='cuda:0'), covar=tensor([0.1894, 0.1793, 0.2103, 0.1600, 0.0821, 0.1637, 0.2335, 0.1910], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0335, 0.0347, 0.0310, 0.0347, 0.0357, 0.0317, 0.0349], + device='cuda:0'), out_proj_covar=tensor([6.9273e-05, 7.1938e-05, 7.5047e-05, 6.5023e-05, 7.3785e-05, 7.8070e-05, + 6.9076e-05, 7.5331e-05], device='cuda:0') +2023-04-26 13:44:39,940 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14120.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 13:44:47,127 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.256e+02 1.867e+02 2.323e+02 2.768e+02 1.192e+03, threshold=4.647e+02, percent-clipped=2.0 +2023-04-26 13:45:00,928 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14133.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:45:02,282 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-26 13:45:30,809 INFO [finetune.py:976] (0/7) Epoch 3, batch 2700, loss[loss=0.2321, simple_loss=0.2762, pruned_loss=0.09399, over 4828.00 frames. ], tot_loss[loss=0.2386, simple_loss=0.2936, pruned_loss=0.09178, over 956594.52 frames. ], batch size: 25, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:45:41,224 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14164.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:46:03,629 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14181.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 13:46:14,298 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7213, 1.3604, 1.3075, 1.3972, 2.0176, 1.6292, 1.3098, 1.2734], + device='cuda:0'), covar=tensor([0.1836, 0.2018, 0.2262, 0.1752, 0.1200, 0.2230, 0.2978, 0.2249], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0336, 0.0347, 0.0311, 0.0348, 0.0358, 0.0317, 0.0349], + device='cuda:0'), out_proj_covar=tensor([6.9119e-05, 7.2058e-05, 7.5029e-05, 6.5077e-05, 7.3893e-05, 7.8218e-05, + 6.9006e-05, 7.5365e-05], device='cuda:0') +2023-04-26 13:46:19,362 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-04-26 13:46:30,715 INFO [finetune.py:976] (0/7) Epoch 3, batch 2750, loss[loss=0.2271, simple_loss=0.2789, pruned_loss=0.08765, over 4765.00 frames. ], tot_loss[loss=0.236, simple_loss=0.2907, pruned_loss=0.09066, over 957096.36 frames. ], batch size: 27, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:46:38,086 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6716, 1.1852, 1.2938, 1.2826, 1.8912, 1.5206, 1.2115, 1.2214], + device='cuda:0'), covar=tensor([0.2007, 0.1895, 0.2437, 0.1794, 0.0992, 0.1885, 0.2241, 0.2258], + device='cuda:0'), in_proj_covar=tensor([0.0320, 0.0338, 0.0348, 0.0312, 0.0350, 0.0360, 0.0319, 0.0351], + device='cuda:0'), out_proj_covar=tensor([6.9693e-05, 7.2548e-05, 7.5441e-05, 6.5436e-05, 7.4382e-05, 7.8707e-05, + 6.9375e-05, 7.5860e-05], device='cuda:0') +2023-04-26 13:46:40,385 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.352e+02 1.823e+02 2.218e+02 2.622e+02 5.684e+02, threshold=4.435e+02, percent-clipped=2.0 +2023-04-26 13:46:57,075 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3696, 2.9846, 0.9200, 1.7104, 1.7744, 2.2093, 1.9271, 1.0548], + device='cuda:0'), covar=tensor([0.1337, 0.0898, 0.2080, 0.1381, 0.1100, 0.1063, 0.1418, 0.1856], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0267, 0.0149, 0.0130, 0.0141, 0.0164, 0.0127, 0.0130], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 13:47:03,042 INFO [finetune.py:976] (0/7) Epoch 3, batch 2800, loss[loss=0.219, simple_loss=0.2724, pruned_loss=0.08282, over 4929.00 frames. ], tot_loss[loss=0.2335, simple_loss=0.2877, pruned_loss=0.08964, over 957774.62 frames. ], batch size: 36, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:47:29,443 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3947, 3.1848, 0.9658, 1.8275, 1.8106, 2.2977, 1.9925, 1.1814], + device='cuda:0'), covar=tensor([0.1299, 0.0810, 0.1997, 0.1266, 0.1045, 0.1085, 0.1258, 0.1902], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0267, 0.0149, 0.0131, 0.0141, 0.0164, 0.0127, 0.0131], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 13:47:35,420 INFO [finetune.py:976] (0/7) Epoch 3, batch 2850, loss[loss=0.2813, simple_loss=0.3304, pruned_loss=0.1161, over 4793.00 frames. ], tot_loss[loss=0.2322, simple_loss=0.2862, pruned_loss=0.08913, over 956898.50 frames. ], batch size: 45, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:47:41,429 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-26 13:47:45,433 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.273e+02 1.919e+02 2.285e+02 2.703e+02 8.098e+02, threshold=4.570e+02, percent-clipped=1.0 +2023-04-26 13:48:06,251 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-26 13:48:08,420 INFO [finetune.py:976] (0/7) Epoch 3, batch 2900, loss[loss=0.2239, simple_loss=0.288, pruned_loss=0.07985, over 4829.00 frames. ], tot_loss[loss=0.2352, simple_loss=0.2894, pruned_loss=0.09049, over 953446.80 frames. 
], batch size: 40, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:48:33,698 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14393.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:48:41,650 INFO [finetune.py:976] (0/7) Epoch 3, batch 2950, loss[loss=0.2132, simple_loss=0.2788, pruned_loss=0.07378, over 4795.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.2935, pruned_loss=0.09181, over 953262.85 frames. ], batch size: 26, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:48:42,305 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.8981, 4.8232, 3.4338, 5.5752, 4.8448, 4.8985, 2.6454, 4.8213], + device='cuda:0'), covar=tensor([0.1303, 0.0762, 0.2450, 0.0696, 0.2465, 0.1337, 0.4535, 0.1840], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0228, 0.0268, 0.0320, 0.0315, 0.0265, 0.0282, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:48:46,069 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14413.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:48:51,910 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.499e+02 2.004e+02 2.390e+02 2.807e+02 6.198e+02, threshold=4.780e+02, percent-clipped=3.0 +2023-04-26 13:48:52,660 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7877, 2.3089, 1.9643, 2.1894, 1.6627, 1.8885, 1.9763, 1.6108], + device='cuda:0'), covar=tensor([0.2410, 0.1390, 0.1054, 0.1353, 0.3628, 0.1516, 0.2129, 0.2858], + device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0339, 0.0248, 0.0310, 0.0321, 0.0291, 0.0280, 0.0302], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:48:55,645 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14428.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:49:04,518 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=14441.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:49:14,497 INFO [finetune.py:976] (0/7) Epoch 3, batch 3000, loss[loss=0.2525, simple_loss=0.2966, pruned_loss=0.1042, over 4742.00 frames. ], tot_loss[loss=0.2385, simple_loss=0.2936, pruned_loss=0.09171, over 953199.31 frames. ], batch size: 23, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:49:14,498 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 13:49:18,880 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6737, 2.1007, 1.5270, 1.2473, 1.2757, 1.2769, 1.4934, 1.2722], + device='cuda:0'), covar=tensor([0.2415, 0.1908, 0.2406, 0.2812, 0.3783, 0.2839, 0.1959, 0.2977], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0223, 0.0188, 0.0213, 0.0227, 0.0193, 0.0183, 0.0203], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 13:49:25,022 INFO [finetune.py:1010] (0/7) Epoch 3, validation: loss=0.1699, simple_loss=0.2433, pruned_loss=0.04821, over 2265189.00 frames. 
+2023-04-26 13:49:25,022 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6243MB +2023-04-26 13:49:26,927 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14459.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:49:36,016 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14474.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:49:37,141 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14476.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 13:49:44,302 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14487.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:50:13,202 INFO [finetune.py:976] (0/7) Epoch 3, batch 3050, loss[loss=0.2633, simple_loss=0.3199, pruned_loss=0.1034, over 4814.00 frames. ], tot_loss[loss=0.2395, simple_loss=0.2951, pruned_loss=0.09196, over 954615.98 frames. ], batch size: 33, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:50:20,668 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2408, 2.3311, 2.4479, 2.8065, 2.7046, 1.9705, 1.7270, 2.2836], + device='cuda:0'), covar=tensor([0.1077, 0.0928, 0.0588, 0.0644, 0.0676, 0.1248, 0.1224, 0.0657], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0209, 0.0188, 0.0182, 0.0182, 0.0200, 0.0174, 0.0194], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:50:24,173 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.447e+02 2.059e+02 2.417e+02 2.833e+02 5.136e+02, threshold=4.834e+02, percent-clipped=1.0 +2023-04-26 13:50:52,327 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14548.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:50:52,885 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8241, 3.6846, 2.7583, 4.3538, 3.8009, 3.8372, 1.7188, 3.6798], + device='cuda:0'), covar=tensor([0.1574, 0.1234, 0.3416, 0.1601, 0.2837, 0.1622, 0.5570, 0.2134], + device='cuda:0'), in_proj_covar=tensor([0.0257, 0.0229, 0.0269, 0.0321, 0.0315, 0.0265, 0.0283, 0.0282], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:51:03,204 INFO [finetune.py:976] (0/7) Epoch 3, batch 3100, loss[loss=0.1948, simple_loss=0.2588, pruned_loss=0.06537, over 4801.00 frames. ], tot_loss[loss=0.238, simple_loss=0.2934, pruned_loss=0.0913, over 954632.38 frames. ], batch size: 51, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:51:03,997 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-26 13:51:40,371 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9971, 3.7980, 2.8829, 4.5998, 3.9991, 4.0547, 2.0275, 3.8228], + device='cuda:0'), covar=tensor([0.1511, 0.1260, 0.2766, 0.1478, 0.2230, 0.1694, 0.5250, 0.2159], + device='cuda:0'), in_proj_covar=tensor([0.0258, 0.0229, 0.0270, 0.0322, 0.0316, 0.0266, 0.0284, 0.0283], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:51:58,021 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-26 13:52:09,677 INFO [finetune.py:976] (0/7) Epoch 3, batch 3150, loss[loss=0.2824, simple_loss=0.3307, pruned_loss=0.117, over 4912.00 frames. 
], tot_loss[loss=0.2338, simple_loss=0.2891, pruned_loss=0.08928, over 955106.25 frames. ], batch size: 36, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:52:22,660 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-26 13:52:30,357 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.439e+02 2.002e+02 2.381e+02 3.061e+02 6.553e+02, threshold=4.761e+02, percent-clipped=3.0 +2023-04-26 13:52:51,772 INFO [finetune.py:976] (0/7) Epoch 3, batch 3200, loss[loss=0.2764, simple_loss=0.3281, pruned_loss=0.1124, over 4865.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2852, pruned_loss=0.08743, over 956728.22 frames. ], batch size: 34, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:53:25,510 INFO [finetune.py:976] (0/7) Epoch 3, batch 3250, loss[loss=0.2467, simple_loss=0.3054, pruned_loss=0.094, over 4823.00 frames. ], tot_loss[loss=0.2291, simple_loss=0.2837, pruned_loss=0.08725, over 955290.78 frames. ], batch size: 39, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:53:37,682 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.274e+02 1.942e+02 2.271e+02 2.763e+02 5.504e+02, threshold=4.542e+02, percent-clipped=2.0 +2023-04-26 13:53:42,002 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14728.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:53:45,761 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7188, 1.3790, 1.2013, 1.5092, 1.9809, 1.5690, 1.2734, 1.2539], + device='cuda:0'), covar=tensor([0.2260, 0.1985, 0.2489, 0.1772, 0.1029, 0.2102, 0.2895, 0.2222], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0339, 0.0351, 0.0313, 0.0352, 0.0362, 0.0321, 0.0354], + device='cuda:0'), out_proj_covar=tensor([7.0008e-05, 7.2882e-05, 7.5959e-05, 6.5505e-05, 7.4800e-05, 7.8945e-05, + 6.9961e-05, 7.6394e-05], device='cuda:0') +2023-04-26 13:53:49,339 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=14740.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:53:59,443 INFO [finetune.py:976] (0/7) Epoch 3, batch 3300, loss[loss=0.2837, simple_loss=0.335, pruned_loss=0.1162, over 4872.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.2882, pruned_loss=0.08913, over 953464.07 frames. 
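The tot_loss[... over ~955k frames] figures are a running, frame-weighted aggregate rather than a plain epoch mean: with reset_interval=200 from the config, a decay of 1 - 1/200 gives a steady-state window of roughly 200 batches, and 200 x the ~4,800 frames of a single batch matches the ~955,000 frames reported. A sketch under that assumption (the RunningLoss class is illustrative, not the recipe's tracker):

```python
class RunningLoss:
    """Illustrative frame-weighted running average; the decay of
    1 - 1/reset_interval is an assumption that reproduces the ~955k
    steady-state frame counts seen in this log."""

    def __init__(self, reset_interval=200):
        self.decay = 1.0 - 1.0 / reset_interval
        self.weighted_loss = 0.0  # decayed sum of loss * frames
        self.frames = 0.0         # decayed sum of frames

    def update(self, loss, num_frames):
        self.weighted_loss = self.weighted_loss * self.decay + loss * num_frames
        self.frames = self.frames * self.decay + num_frames

    @property
    def tot_loss(self):
        return self.weighted_loss / max(self.frames, 1.0)
```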
], batch size: 31, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:54:01,403 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14759.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:54:09,424 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14769.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:54:13,620 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7152, 2.1819, 1.1463, 1.3660, 2.2222, 1.6255, 1.5404, 1.6615], + device='cuda:0'), covar=tensor([0.0607, 0.0385, 0.0387, 0.0608, 0.0274, 0.0598, 0.0596, 0.0665], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 13:54:14,674 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=14776.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:54:14,701 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=14776.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 13:54:30,614 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=14801.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:54:33,538 INFO [finetune.py:976] (0/7) Epoch 3, batch 3350, loss[loss=0.2517, simple_loss=0.3088, pruned_loss=0.09729, over 4865.00 frames. ], tot_loss[loss=0.2338, simple_loss=0.2895, pruned_loss=0.08905, over 954392.82 frames. ], batch size: 34, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:54:34,171 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=14807.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:54:44,575 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.421e+02 1.946e+02 2.255e+02 2.721e+02 6.180e+02, threshold=4.510e+02, percent-clipped=3.0 +2023-04-26 13:54:46,353 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=14824.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 13:54:48,674 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1561, 1.4724, 1.9122, 2.3484, 1.8214, 1.4465, 1.1011, 1.7461], + device='cuda:0'), covar=tensor([0.4975, 0.5666, 0.2796, 0.4996, 0.5469, 0.4438, 0.7574, 0.4644], + device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0273, 0.0224, 0.0343, 0.0231, 0.0235, 0.0262, 0.0209], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:54:54,022 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9741, 2.3702, 1.0719, 1.2375, 1.9544, 1.2676, 3.0050, 1.4960], + device='cuda:0'), covar=tensor([0.0681, 0.0607, 0.0785, 0.1263, 0.0480, 0.0961, 0.0241, 0.0707], + device='cuda:0'), in_proj_covar=tensor([0.0055, 0.0072, 0.0053, 0.0050, 0.0055, 0.0056, 0.0084, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 13:54:58,936 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=14843.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:55:13,044 INFO [finetune.py:976] (0/7) Epoch 3, batch 3400, loss[loss=0.2359, simple_loss=0.2962, pruned_loss=0.08781, over 4733.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.293, pruned_loss=0.09092, over 955331.59 frames. 
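In every [optim.py:369] line in this log the threshold equals Clipping_scale times the median gradient norm, e.g. 4.510e+02 = 2.0 x 2.255e+02 just above, so the clipping threshold evidently tracks a running median rather than a fixed value. A sketch of that behaviour; the window size and the exact rescaling mechanics are assumptions:

```python
import torch

def clip_with_median_threshold(model, norm_history, clipping_scale=2.0):
    """Illustrative sketch: threshold = clipping_scale * running median
    of recent grad norms, as the logged quartiles/thresholds suggest."""
    params = [p for p in model.parameters() if p.grad is not None]
    total_norm = torch.norm(torch.stack([p.grad.norm() for p in params]))
    norm_history.append(total_norm.item())
    recent = torch.tensor(norm_history[-10000:])  # assumed window size
    quartiles = recent.quantile(torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * quartiles[2].item()  # 2 x median
    if total_norm.item() > threshold:  # counted in "percent-clipped"
        for p in params:
            p.grad.mul_(threshold / total_norm.item())
    return quartiles, threshold
```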
], batch size: 54, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:56:04,479 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.14 vs. limit=5.0 +2023-04-26 13:56:08,496 INFO [finetune.py:976] (0/7) Epoch 3, batch 3450, loss[loss=0.2682, simple_loss=0.3116, pruned_loss=0.1124, over 4902.00 frames. ], tot_loss[loss=0.2374, simple_loss=0.2932, pruned_loss=0.09081, over 955312.36 frames. ], batch size: 37, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:56:18,847 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.992e+02 2.282e+02 2.746e+02 5.093e+02, threshold=4.564e+02, percent-clipped=2.0 +2023-04-26 13:56:20,111 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6569, 1.6176, 1.9008, 1.8958, 1.8495, 1.6199, 1.7383, 1.7901], + device='cuda:0'), covar=tensor([2.1630, 2.7403, 3.2644, 3.6491, 2.3695, 3.6500, 3.7339, 2.6497], + device='cuda:0'), in_proj_covar=tensor([0.0446, 0.0497, 0.0588, 0.0598, 0.0478, 0.0510, 0.0524, 0.0532], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:56:40,739 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.51 vs. limit=5.0 +2023-04-26 13:56:42,394 INFO [finetune.py:976] (0/7) Epoch 3, batch 3500, loss[loss=0.205, simple_loss=0.2675, pruned_loss=0.07119, over 4769.00 frames. ], tot_loss[loss=0.2341, simple_loss=0.2899, pruned_loss=0.08914, over 955750.60 frames. ], batch size: 26, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:57:38,371 INFO [finetune.py:976] (0/7) Epoch 3, batch 3550, loss[loss=0.2014, simple_loss=0.2431, pruned_loss=0.07985, over 4218.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2866, pruned_loss=0.0887, over 954887.75 frames. ], batch size: 17, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:57:51,769 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7195, 1.9329, 1.6708, 1.8616, 1.7349, 2.0073, 1.7570, 1.7164], + device='cuda:0'), covar=tensor([1.1115, 2.2939, 1.9096, 1.5419, 1.7671, 2.7940, 2.5208, 2.0947], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0403, 0.0320, 0.0326, 0.0353, 0.0410, 0.0389, 0.0347], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:57:54,036 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.782e+02 2.249e+02 2.630e+02 4.772e+02, threshold=4.498e+02, percent-clipped=1.0 +2023-04-26 13:58:28,221 INFO [finetune.py:976] (0/7) Epoch 3, batch 3600, loss[loss=0.244, simple_loss=0.2931, pruned_loss=0.09746, over 4821.00 frames. ], tot_loss[loss=0.2299, simple_loss=0.284, pruned_loss=0.08789, over 955226.08 frames. ], batch size: 38, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:58:28,945 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6740, 3.5547, 0.8302, 1.9588, 2.1544, 2.3370, 2.1954, 1.0667], + device='cuda:0'), covar=tensor([0.1317, 0.1087, 0.2245, 0.1372, 0.0980, 0.1242, 0.1397, 0.1886], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0268, 0.0149, 0.0131, 0.0140, 0.0165, 0.0127, 0.0131], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 13:58:34,684 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. 
limit=5.0 +2023-04-26 13:58:36,830 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15069.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:58:56,111 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15096.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:59:02,216 INFO [finetune.py:976] (0/7) Epoch 3, batch 3650, loss[loss=0.2589, simple_loss=0.3186, pruned_loss=0.09957, over 4734.00 frames. ], tot_loss[loss=0.2319, simple_loss=0.2866, pruned_loss=0.08859, over 955530.35 frames. ], batch size: 54, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:59:09,047 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=15117.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:59:12,570 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.417e+02 2.035e+02 2.405e+02 3.001e+02 5.155e+02, threshold=4.810e+02, percent-clipped=2.0 +2023-04-26 13:59:25,742 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7589, 2.5421, 2.0337, 2.1974, 1.8401, 1.9531, 2.1316, 1.6523], + device='cuda:0'), covar=tensor([0.3074, 0.1627, 0.1202, 0.1810, 0.3577, 0.1820, 0.2778, 0.3856], + device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0337, 0.0247, 0.0310, 0.0321, 0.0291, 0.0280, 0.0302], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 13:59:28,031 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15143.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 13:59:34,537 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8539, 2.8331, 2.2168, 3.2507, 2.8785, 2.8436, 1.1312, 2.8152], + device='cuda:0'), covar=tensor([0.2252, 0.1715, 0.3734, 0.2966, 0.3411, 0.2095, 0.5863, 0.2702], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0227, 0.0269, 0.0322, 0.0314, 0.0264, 0.0282, 0.0281], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 13:59:36,235 INFO [finetune.py:976] (0/7) Epoch 3, batch 3700, loss[loss=0.2444, simple_loss=0.2923, pruned_loss=0.09824, over 4897.00 frames. ], tot_loss[loss=0.2346, simple_loss=0.2897, pruned_loss=0.0898, over 954447.85 frames. ], batch size: 35, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 13:59:59,374 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=15191.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:00:02,753 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15195.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:00:10,003 INFO [finetune.py:976] (0/7) Epoch 3, batch 3750, loss[loss=0.2731, simple_loss=0.3149, pruned_loss=0.1156, over 4821.00 frames. ], tot_loss[loss=0.2357, simple_loss=0.2911, pruned_loss=0.09018, over 953980.29 frames. ], batch size: 33, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 14:00:24,832 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.429e+02 1.976e+02 2.277e+02 2.735e+02 5.558e+02, threshold=4.554e+02, percent-clipped=2.0 +2023-04-26 14:00:55,181 INFO [finetune.py:976] (0/7) Epoch 3, batch 3800, loss[loss=0.2623, simple_loss=0.3147, pruned_loss=0.1049, over 4837.00 frames. ], tot_loss[loss=0.2371, simple_loss=0.293, pruned_loss=0.09057, over 955091.97 frames. 
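The [zipformer.py:1188] lines track per-stack stochastic layer skipping: each encoder stack owns a warmup window (warmup_begin/warmup_end), and a random subset of its layers is dropped per batch, which by this point in training (batch_count ≈ 15,000, well past every warmup_end) has decayed to num_to_drop=0 with occasional single drops. A sketch with assumed probabilities, since the real schedule is not recoverable from the log alone:

```python
import random

def pick_layers_to_drop(num_layers, batch_count, warmup_end,
                        early_p=0.5, late_p=0.05):
    """Illustrative layer-skip schedule: frequent drops during warmup,
    rare residual drops afterwards (matching this log's num_to_drop=0/1
    entries). Both probabilities are assumptions."""
    p = early_p if batch_count < warmup_end else late_p
    return {i for i in range(num_layers) if random.random() < p}
```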
], batch size: 47, lr: 3.98e-03, grad_scale: 16.0 +2023-04-26 14:00:55,312 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15256.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 14:01:17,191 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.1368, 3.4914, 1.6296, 2.2418, 3.0154, 2.3053, 4.6883, 2.8410], + device='cuda:0'), covar=tensor([0.0496, 0.0691, 0.0780, 0.1070, 0.0439, 0.0792, 0.0194, 0.0485], + device='cuda:0'), in_proj_covar=tensor([0.0055, 0.0072, 0.0053, 0.0050, 0.0055, 0.0055, 0.0084, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 14:01:29,009 INFO [finetune.py:976] (0/7) Epoch 3, batch 3850, loss[loss=0.2313, simple_loss=0.2852, pruned_loss=0.08867, over 4774.00 frames. ], tot_loss[loss=0.2355, simple_loss=0.2911, pruned_loss=0.08991, over 951009.73 frames. ], batch size: 29, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:01:38,751 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.876e+02 2.244e+02 2.688e+02 1.462e+03, threshold=4.488e+02, percent-clipped=2.0 +2023-04-26 14:02:01,731 INFO [finetune.py:976] (0/7) Epoch 3, batch 3900, loss[loss=0.2268, simple_loss=0.277, pruned_loss=0.08826, over 4758.00 frames. ], tot_loss[loss=0.2324, simple_loss=0.2879, pruned_loss=0.08846, over 952834.52 frames. ], batch size: 27, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:02:16,746 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1611, 1.5137, 2.0566, 2.5976, 1.9392, 1.4854, 1.3716, 1.9680], + device='cuda:0'), covar=tensor([0.4708, 0.5806, 0.2488, 0.4048, 0.5037, 0.4266, 0.6616, 0.4181], + device='cuda:0'), in_proj_covar=tensor([0.0271, 0.0270, 0.0222, 0.0340, 0.0229, 0.0233, 0.0259, 0.0207], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 14:02:50,611 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15396.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:02:51,923 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-26 14:03:03,039 INFO [finetune.py:976] (0/7) Epoch 3, batch 3950, loss[loss=0.1959, simple_loss=0.2533, pruned_loss=0.06922, over 4905.00 frames. ], tot_loss[loss=0.2283, simple_loss=0.2838, pruned_loss=0.0864, over 954189.35 frames. ], batch size: 36, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:03:03,733 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1863, 1.5625, 1.4667, 1.8495, 1.6707, 1.9566, 1.4727, 3.7480], + device='cuda:0'), covar=tensor([0.0680, 0.0814, 0.0829, 0.1261, 0.0664, 0.0556, 0.0763, 0.0140], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0041, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 14:03:24,414 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.293e+02 1.749e+02 2.086e+02 2.518e+02 3.780e+02, threshold=4.171e+02, percent-clipped=0.0 +2023-04-26 14:03:54,141 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=15444.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:04:08,306 INFO [finetune.py:976] (0/7) Epoch 3, batch 4000, loss[loss=0.2981, simple_loss=0.3454, pruned_loss=0.1254, over 4815.00 frames. 
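grad_scale doubles from 16.0 to 32.0 at batch 3850 above and falls back to 16.0 further down the log (by batch 4950), the signature of dynamic fp16 loss scaling (use_fp16=True in the config): the scale grows after a run of overflow-free steps and is halved when gradients overflow. A sketch using PyTorch's stock scaler; the growth interval and factors shown are assumptions, and compute_loss is a hypothetical stand-in for the recipe's forward pass:

```python
import torch

scaler = torch.cuda.amp.GradScaler(
    enabled=True,          # use_fp16=True in the run config
    growth_factor=2.0,     # doubles the scale (16.0 -> 32.0 in the log)
    backoff_factor=0.5,    # halves it after an overflow (32.0 -> 16.0)
    growth_interval=2000,  # assumed steps between growth attempts
)

def train_step(model, optimizer, batch, compute_loss):
    """Illustrative fp16 training step."""
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = compute_loss(model, batch)
    scaler.scale(loss).backward()
    scaler.step(optimizer)  # silently skipped if the grads overflowed
    scaler.update()         # grows or backs off grad_scale
    return loss.detach(), scaler.get_scale()
```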
], tot_loss[loss=0.2307, simple_loss=0.2852, pruned_loss=0.08807, over 954785.20 frames. ], batch size: 41, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:05:04,593 INFO [finetune.py:976] (0/7) Epoch 3, batch 4050, loss[loss=0.2475, simple_loss=0.3088, pruned_loss=0.09312, over 4836.00 frames. ], tot_loss[loss=0.2314, simple_loss=0.2865, pruned_loss=0.08814, over 954724.88 frames. ], batch size: 47, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:05:15,729 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.986e+02 2.201e+02 2.641e+02 4.376e+02, threshold=4.402e+02, percent-clipped=3.0 +2023-04-26 14:05:33,868 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=15551.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 14:05:36,856 INFO [finetune.py:976] (0/7) Epoch 3, batch 4100, loss[loss=0.2487, simple_loss=0.3033, pruned_loss=0.09703, over 4859.00 frames. ], tot_loss[loss=0.2329, simple_loss=0.2892, pruned_loss=0.08834, over 955195.15 frames. ], batch size: 31, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:06:10,673 INFO [finetune.py:976] (0/7) Epoch 3, batch 4150, loss[loss=0.2571, simple_loss=0.3156, pruned_loss=0.09927, over 4924.00 frames. ], tot_loss[loss=0.2343, simple_loss=0.2904, pruned_loss=0.08909, over 955756.13 frames. ], batch size: 38, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:06:23,086 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.264e+02 1.923e+02 2.257e+02 2.741e+02 5.009e+02, threshold=4.514e+02, percent-clipped=1.0 +2023-04-26 14:06:31,800 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-26 14:06:32,281 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0244, 4.3446, 0.8752, 2.2277, 2.6542, 2.6684, 2.6297, 0.9804], + device='cuda:0'), covar=tensor([0.1245, 0.0955, 0.2167, 0.1259, 0.0851, 0.1122, 0.1354, 0.2000], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0266, 0.0148, 0.0130, 0.0140, 0.0163, 0.0127, 0.0131], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 14:06:42,599 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-26 14:06:43,827 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-26 14:06:44,258 INFO [finetune.py:976] (0/7) Epoch 3, batch 4200, loss[loss=0.2588, simple_loss=0.3044, pruned_loss=0.1066, over 4834.00 frames. ], tot_loss[loss=0.2344, simple_loss=0.2911, pruned_loss=0.08881, over 954291.75 frames. ], batch size: 49, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:07:17,985 INFO [finetune.py:976] (0/7) Epoch 3, batch 4250, loss[loss=0.2188, simple_loss=0.275, pruned_loss=0.08129, over 4911.00 frames. ], tot_loss[loss=0.232, simple_loss=0.2886, pruned_loss=0.08767, over 954288.59 frames. ], batch size: 46, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:07:24,128 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. 
limit=2.0 +2023-04-26 14:07:29,538 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.926e+02 2.195e+02 2.674e+02 5.290e+02, threshold=4.390e+02, percent-clipped=1.0 +2023-04-26 14:07:37,367 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7507, 2.0093, 1.0665, 1.3561, 2.1670, 1.5940, 1.5392, 1.5533], + device='cuda:0'), covar=tensor([0.0552, 0.0404, 0.0381, 0.0577, 0.0243, 0.0569, 0.0553, 0.0648], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 14:07:45,797 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.3950, 4.2758, 3.1333, 5.0551, 4.4138, 4.3687, 2.2579, 4.3295], + device='cuda:0'), covar=tensor([0.1612, 0.1007, 0.3251, 0.0938, 0.2716, 0.1755, 0.5165, 0.2003], + device='cuda:0'), in_proj_covar=tensor([0.0256, 0.0228, 0.0269, 0.0322, 0.0316, 0.0266, 0.0281, 0.0283], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 14:07:50,579 INFO [finetune.py:976] (0/7) Epoch 3, batch 4300, loss[loss=0.2101, simple_loss=0.2695, pruned_loss=0.07532, over 4832.00 frames. ], tot_loss[loss=0.229, simple_loss=0.2852, pruned_loss=0.08646, over 954659.67 frames. ], batch size: 39, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:08:59,709 INFO [finetune.py:976] (0/7) Epoch 3, batch 4350, loss[loss=0.2814, simple_loss=0.3311, pruned_loss=0.1158, over 4798.00 frames. ], tot_loss[loss=0.2265, simple_loss=0.2822, pruned_loss=0.08536, over 955297.78 frames. ], batch size: 51, lr: 3.98e-03, grad_scale: 32.0 +2023-04-26 14:09:01,635 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6455, 1.5308, 0.7415, 1.2571, 1.8316, 1.5208, 1.3887, 1.4828], + device='cuda:0'), covar=tensor([0.0568, 0.0433, 0.0442, 0.0612, 0.0295, 0.0586, 0.0542, 0.0641], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 14:09:21,816 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.851e+02 2.117e+02 2.553e+02 4.240e+02, threshold=4.233e+02, percent-clipped=0.0 +2023-04-26 14:09:33,231 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-26 14:09:44,806 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-26 14:10:01,734 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=15851.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:10:04,716 INFO [finetune.py:976] (0/7) Epoch 3, batch 4400, loss[loss=0.322, simple_loss=0.3665, pruned_loss=0.1387, over 4787.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2841, pruned_loss=0.08686, over 954027.41 frames. 
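The attn_weights_entropy dumps are a diagnostic on how peaky each attention head is: the entropy H = -Σ p log p of a softmaxed attention row, averaged over query positions, where values near 0 mean a head locks onto a single key and larger values mean diffuse attention (the accompanying covar/in_proj_covar/out_proj_covar tensors are the matching covariance diagnostics). A sketch of the metric itself, with the tensor layout an assumed convention:

```python
import torch

def attn_weights_entropy(attn: torch.Tensor) -> torch.Tensor:
    """Per-head attention entropy, -sum(p * log p) over keys, averaged
    over queries. The (num_heads, num_queries, num_keys) layout is an
    assumption for illustration; rows are assumed to sum to 1."""
    p = attn.clamp(min=1e-20)         # guard against log(0)
    h = -(p * p.log()).sum(dim=-1)    # (num_heads, num_queries)
    return h.mean(dim=-1)             # (num_heads,)
```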
], batch size: 54, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:10:04,805 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1597, 1.4717, 1.3563, 1.8010, 1.5266, 1.7620, 1.5034, 3.4993], + device='cuda:0'), covar=tensor([0.0712, 0.0791, 0.0877, 0.1227, 0.0680, 0.0582, 0.0756, 0.0206], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 14:10:39,611 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=15883.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:10:49,609 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=15899.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:10:53,847 INFO [finetune.py:976] (0/7) Epoch 3, batch 4450, loss[loss=0.2181, simple_loss=0.2765, pruned_loss=0.07985, over 4819.00 frames. ], tot_loss[loss=0.2318, simple_loss=0.2881, pruned_loss=0.08773, over 954989.29 frames. ], batch size: 25, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:10:59,382 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6298, 1.1961, 1.2521, 1.2747, 1.9118, 1.4989, 1.2151, 1.2046], + device='cuda:0'), covar=tensor([0.1717, 0.1847, 0.2273, 0.1686, 0.0990, 0.2076, 0.2589, 0.2140], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0341, 0.0354, 0.0314, 0.0353, 0.0364, 0.0321, 0.0356], + device='cuda:0'), out_proj_covar=tensor([6.9867e-05, 7.3159e-05, 7.6656e-05, 6.5867e-05, 7.4919e-05, 7.9325e-05, + 6.9960e-05, 7.6940e-05], device='cuda:0') +2023-04-26 14:11:04,060 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.361e+02 2.045e+02 2.492e+02 3.065e+02 6.089e+02, threshold=4.985e+02, percent-clipped=5.0 +2023-04-26 14:11:20,092 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=15944.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:11:27,286 INFO [finetune.py:976] (0/7) Epoch 3, batch 4500, loss[loss=0.2717, simple_loss=0.3287, pruned_loss=0.1074, over 4900.00 frames. ], tot_loss[loss=0.2332, simple_loss=0.2897, pruned_loss=0.08835, over 953151.29 frames. 
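The learning rate drifting from 3.98e-03 to 3.97e-03 around batch_count ~15,800 is consistent with an Eden-style schedule built from base_lr=0.004, lr_batches=100000 and lr_epochs=100 in the run config. Whether this run uses exactly this formula is an assumption, but the numbers line up:

```python
def eden_lr(step, epoch, base_lr=0.004, lr_batches=1e5, lr_epochs=100.0):
    """Eden-style schedule (assumed for this run); reproduces the logged
    values: eden_lr(14400, 3) ~ 3.98e-03, eden_lr(15900, 3) ~ 3.97e-03."""
    batch_factor = ((step**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * batch_factor * epoch_factor
```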
], batch size: 37, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:11:27,443 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5778, 1.5649, 1.8222, 1.8185, 1.7868, 1.5007, 1.6655, 1.6751], + device='cuda:0'), covar=tensor([2.0206, 2.5649, 3.0485, 3.1703, 2.0186, 3.4113, 3.4862, 2.3738], + device='cuda:0'), in_proj_covar=tensor([0.0446, 0.0494, 0.0585, 0.0597, 0.0477, 0.0507, 0.0520, 0.0531], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 14:11:50,054 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7776, 1.8812, 1.2648, 1.4362, 1.9975, 1.6315, 1.5687, 1.6176], + device='cuda:0'), covar=tensor([0.0512, 0.0350, 0.0422, 0.0510, 0.0300, 0.0491, 0.0474, 0.0579], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 14:11:57,309 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-16000.pt +2023-04-26 14:11:59,091 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16001.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:12:02,031 INFO [finetune.py:976] (0/7) Epoch 3, batch 4550, loss[loss=0.1849, simple_loss=0.2491, pruned_loss=0.06036, over 4744.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.2897, pruned_loss=0.08827, over 953037.42 frames. ], batch size: 23, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:12:11,702 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.282e+02 1.831e+02 2.180e+02 2.572e+02 4.192e+02, threshold=4.359e+02, percent-clipped=0.0 +2023-04-26 14:12:35,166 INFO [finetune.py:976] (0/7) Epoch 3, batch 4600, loss[loss=0.2527, simple_loss=0.2915, pruned_loss=0.107, over 4871.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.2883, pruned_loss=0.08705, over 954578.36 frames. ], batch size: 34, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:12:38,906 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16062.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:12:53,916 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16085.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:13:07,244 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6689, 1.7291, 0.7663, 1.3148, 1.8631, 1.5257, 1.4404, 1.5323], + device='cuda:0'), covar=tensor([0.0577, 0.0422, 0.0443, 0.0617, 0.0282, 0.0571, 0.0571, 0.0669], + device='cuda:0'), in_proj_covar=tensor([0.0032, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 14:13:08,933 INFO [finetune.py:976] (0/7) Epoch 3, batch 4650, loss[loss=0.1957, simple_loss=0.2561, pruned_loss=0.06767, over 4935.00 frames. ], tot_loss[loss=0.228, simple_loss=0.2844, pruned_loss=0.08579, over 953724.66 frames. ], batch size: 33, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:13:15,811 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.14 vs. 
limit=5.0 +2023-04-26 14:13:18,702 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.295e+02 1.883e+02 2.189e+02 2.586e+02 3.625e+02, threshold=4.377e+02, percent-clipped=0.0 +2023-04-26 14:13:35,057 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9105, 2.4122, 1.9606, 2.3115, 1.8145, 1.9525, 2.0820, 1.5880], + device='cuda:0'), covar=tensor([0.2307, 0.1393, 0.1245, 0.1478, 0.3288, 0.1510, 0.2098, 0.3002], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0336, 0.0247, 0.0310, 0.0322, 0.0289, 0.0279, 0.0301], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 14:13:47,594 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16146.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:13:49,918 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16149.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:13:53,997 INFO [finetune.py:976] (0/7) Epoch 3, batch 4700, loss[loss=0.2252, simple_loss=0.2761, pruned_loss=0.0872, over 4839.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2813, pruned_loss=0.08451, over 953667.91 frames. ], batch size: 33, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:14:11,132 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.47 vs. limit=5.0 +2023-04-26 14:14:15,855 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.80 vs. limit=5.0 +2023-04-26 14:14:38,783 INFO [finetune.py:976] (0/7) Epoch 3, batch 4750, loss[loss=0.2076, simple_loss=0.2664, pruned_loss=0.07445, over 4802.00 frames. ], tot_loss[loss=0.2242, simple_loss=0.2799, pruned_loss=0.08419, over 955023.73 frames. ], batch size: 25, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:14:47,137 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16210.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:15:00,398 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.300e+02 1.819e+02 2.117e+02 2.422e+02 6.375e+02, threshold=4.235e+02, percent-clipped=3.0 +2023-04-26 14:15:10,810 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16239.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:15:11,931 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5238, 1.9917, 1.4261, 1.1435, 1.2025, 1.1797, 1.3794, 1.1521], + device='cuda:0'), covar=tensor([0.2150, 0.1893, 0.2186, 0.2762, 0.3337, 0.2481, 0.1701, 0.2646], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0222, 0.0186, 0.0212, 0.0225, 0.0191, 0.0181, 0.0202], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 14:15:35,037 INFO [finetune.py:976] (0/7) Epoch 3, batch 4800, loss[loss=0.1967, simple_loss=0.2572, pruned_loss=0.0681, over 4806.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.2827, pruned_loss=0.08496, over 953662.22 frames. 
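The [scaling.py:679] "Whitening" lines fire when a group of channels stops looking white. A natural reading of the metric is mean(λ²)/mean(λ)² over the eigenvalues λ of the channel covariance, which equals 1.0 for perfectly whitened features and grows as the spectrum skews (hence 5.14 vs. limit=5.0 above). Whether this is the exact metric is an assumption; a sketch:

```python
import torch

def whitening_metric(x: torch.Tensor) -> float:
    """Assumed form of the logged metric: mean(eig^2) / mean(eig)^2 of
    the channel covariance; 1.0 for white features. x is assumed to be
    (num_frames, num_channels) for one channel group."""
    x = x - x.mean(dim=0)
    cov = (x.T @ x) / x.shape[0]
    eigs = torch.linalg.eigvalsh(cov)  # real, ascending eigenvalues
    return ((eigs ** 2).mean() / eigs.mean() ** 2).item()
```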
], batch size: 25, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:16:12,094 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5335, 1.4671, 0.7143, 1.1992, 1.4877, 1.4234, 1.2858, 1.3106], + device='cuda:0'), covar=tensor([0.0548, 0.0406, 0.0461, 0.0600, 0.0312, 0.0543, 0.0539, 0.0626], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 14:16:42,029 INFO [finetune.py:976] (0/7) Epoch 3, batch 4850, loss[loss=0.2129, simple_loss=0.2669, pruned_loss=0.07939, over 4803.00 frames. ], tot_loss[loss=0.2289, simple_loss=0.2857, pruned_loss=0.08604, over 953332.14 frames. ], batch size: 25, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:16:44,625 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-26 14:17:03,614 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.365e+02 1.946e+02 2.319e+02 2.886e+02 4.382e+02, threshold=4.637e+02, percent-clipped=1.0 +2023-04-26 14:17:31,160 INFO [finetune.py:976] (0/7) Epoch 3, batch 4900, loss[loss=0.261, simple_loss=0.3087, pruned_loss=0.1067, over 4892.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.2873, pruned_loss=0.08694, over 952016.12 frames. ], batch size: 43, lr: 3.97e-03, grad_scale: 32.0 +2023-04-26 14:17:32,336 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16357.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:17:42,323 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16371.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:17:44,150 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6590, 1.1196, 4.4514, 4.1171, 3.9245, 4.2028, 4.1086, 3.9228], + device='cuda:0'), covar=tensor([0.7310, 0.6480, 0.1114, 0.1783, 0.1125, 0.1504, 0.1174, 0.1522], + device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0314, 0.0439, 0.0441, 0.0373, 0.0426, 0.0335, 0.0394], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0002], + device='cuda:0') +2023-04-26 14:17:48,007 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-26 14:18:04,092 INFO [finetune.py:976] (0/7) Epoch 3, batch 4950, loss[loss=0.2231, simple_loss=0.278, pruned_loss=0.08405, over 4805.00 frames. ], tot_loss[loss=0.2345, simple_loss=0.2904, pruned_loss=0.08932, over 952085.25 frames. 
], batch size: 25, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:18:05,349 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9935, 2.4629, 1.0861, 1.2654, 1.8405, 1.2116, 3.3384, 1.6511], + device='cuda:0'), covar=tensor([0.0716, 0.0831, 0.0984, 0.1334, 0.0618, 0.1078, 0.0229, 0.0686], + device='cuda:0'), in_proj_covar=tensor([0.0055, 0.0072, 0.0054, 0.0050, 0.0055, 0.0055, 0.0084, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 14:18:16,359 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.286e+02 1.926e+02 2.230e+02 2.725e+02 6.217e+02, threshold=4.460e+02, percent-clipped=3.0 +2023-04-26 14:18:21,910 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16432.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:18:27,287 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16441.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:18:37,162 INFO [finetune.py:976] (0/7) Epoch 3, batch 5000, loss[loss=0.2356, simple_loss=0.2827, pruned_loss=0.09424, over 4744.00 frames. ], tot_loss[loss=0.2317, simple_loss=0.2878, pruned_loss=0.08783, over 952850.00 frames. ], batch size: 54, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:18:47,611 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1413, 1.3138, 1.4569, 1.5543, 1.5556, 1.2470, 0.9828, 1.3289], + device='cuda:0'), covar=tensor([0.1101, 0.1428, 0.0829, 0.0687, 0.0761, 0.0995, 0.1139, 0.0744], + device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0211, 0.0189, 0.0184, 0.0184, 0.0200, 0.0174, 0.0195], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 14:18:57,538 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. limit=5.0 +2023-04-26 14:19:09,883 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16505.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:19:10,439 INFO [finetune.py:976] (0/7) Epoch 3, batch 5050, loss[loss=0.2446, simple_loss=0.2914, pruned_loss=0.09891, over 4827.00 frames. ], tot_loss[loss=0.2296, simple_loss=0.2855, pruned_loss=0.08685, over 954389.10 frames. ], batch size: 33, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:19:23,721 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.221e+02 1.760e+02 2.084e+02 2.475e+02 5.733e+02, threshold=4.169e+02, percent-clipped=2.0 +2023-04-26 14:19:33,515 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16539.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:19:35,329 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6906, 1.7061, 0.8878, 1.2781, 2.0889, 1.5335, 1.3853, 1.4693], + device='cuda:0'), covar=tensor([0.0569, 0.0429, 0.0430, 0.0627, 0.0262, 0.0580, 0.0604, 0.0680], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 14:19:43,695 INFO [finetune.py:976] (0/7) Epoch 3, batch 5100, loss[loss=0.2494, simple_loss=0.2931, pruned_loss=0.1028, over 4820.00 frames. ], tot_loss[loss=0.2253, simple_loss=0.2814, pruned_loss=0.08463, over 954961.59 frames. 
], batch size: 41, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:20:10,604 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=16587.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:20:20,178 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3295, 2.8999, 0.9534, 1.3513, 2.0976, 1.2999, 4.1579, 1.8880], + device='cuda:0'), covar=tensor([0.0702, 0.0915, 0.1036, 0.1409, 0.0637, 0.1085, 0.0165, 0.0684], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0071, 0.0053, 0.0050, 0.0055, 0.0055, 0.0083, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 14:20:33,132 INFO [finetune.py:976] (0/7) Epoch 3, batch 5150, loss[loss=0.2445, simple_loss=0.3118, pruned_loss=0.08866, over 4813.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.2832, pruned_loss=0.0862, over 954325.66 frames. ], batch size: 40, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:20:44,663 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5031, 3.4465, 0.9598, 1.9330, 1.7246, 2.2747, 1.9137, 1.0221], + device='cuda:0'), covar=tensor([0.1532, 0.0803, 0.2135, 0.1357, 0.1215, 0.1218, 0.1683, 0.2231], + device='cuda:0'), in_proj_covar=tensor([0.0124, 0.0267, 0.0149, 0.0131, 0.0141, 0.0164, 0.0128, 0.0131], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 14:20:56,490 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.263e+02 1.945e+02 2.270e+02 2.709e+02 5.209e+02, threshold=4.540e+02, percent-clipped=2.0 +2023-04-26 14:21:25,148 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5628, 1.7180, 1.6036, 1.8033, 1.6274, 1.8186, 1.6857, 1.6382], + device='cuda:0'), covar=tensor([1.0741, 2.0676, 1.7487, 1.4353, 1.6635, 2.5785, 2.2048, 1.9648], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0403, 0.0321, 0.0326, 0.0352, 0.0411, 0.0390, 0.0346], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 14:21:30,489 INFO [finetune.py:976] (0/7) Epoch 3, batch 5200, loss[loss=0.2156, simple_loss=0.2806, pruned_loss=0.07525, over 4763.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2863, pruned_loss=0.08682, over 955231.80 frames. ], batch size: 26, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:21:31,190 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16657.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:21:55,153 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0 +2023-04-26 14:22:02,884 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=16705.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:22:03,452 INFO [finetune.py:976] (0/7) Epoch 3, batch 5250, loss[loss=0.2177, simple_loss=0.2799, pruned_loss=0.07773, over 4816.00 frames. ], tot_loss[loss=0.2307, simple_loss=0.288, pruned_loss=0.0867, over 953268.17 frames. 
], batch size: 33, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:22:04,132 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5078, 3.9149, 0.6702, 2.0802, 1.8532, 2.3744, 2.2882, 0.9226], + device='cuda:0'), covar=tensor([0.1811, 0.1512, 0.2669, 0.1822, 0.1449, 0.1656, 0.1864, 0.2500], + device='cuda:0'), in_proj_covar=tensor([0.0122, 0.0264, 0.0148, 0.0129, 0.0139, 0.0162, 0.0126, 0.0129], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 14:22:14,739 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.228e+02 1.971e+02 2.303e+02 2.786e+02 5.327e+02, threshold=4.606e+02, percent-clipped=1.0 +2023-04-26 14:22:18,685 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=16727.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:22:27,612 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16741.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:22:47,370 INFO [finetune.py:976] (0/7) Epoch 3, batch 5300, loss[loss=0.1873, simple_loss=0.2517, pruned_loss=0.0615, over 4113.00 frames. ], tot_loss[loss=0.233, simple_loss=0.2901, pruned_loss=0.08796, over 952077.78 frames. ], batch size: 65, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:23:06,922 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-26 14:23:10,420 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=16789.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:23:20,214 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=16805.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:23:20,766 INFO [finetune.py:976] (0/7) Epoch 3, batch 5350, loss[loss=0.2031, simple_loss=0.2652, pruned_loss=0.07055, over 4758.00 frames. ], tot_loss[loss=0.233, simple_loss=0.2901, pruned_loss=0.088, over 951134.01 frames. ], batch size: 27, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:23:31,444 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.249e+02 1.831e+02 2.243e+02 2.677e+02 3.612e+02, threshold=4.486e+02, percent-clipped=0.0 +2023-04-26 14:23:52,041 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=16853.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:23:53,793 INFO [finetune.py:976] (0/7) Epoch 3, batch 5400, loss[loss=0.25, simple_loss=0.2987, pruned_loss=0.1006, over 4792.00 frames. ], tot_loss[loss=0.2308, simple_loss=0.2875, pruned_loss=0.08708, over 952370.02 frames. ], batch size: 51, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:24:01,173 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16868.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 14:24:27,334 INFO [finetune.py:976] (0/7) Epoch 3, batch 5450, loss[loss=0.1739, simple_loss=0.2471, pruned_loss=0.05039, over 4824.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.2832, pruned_loss=0.08525, over 952626.40 frames. 
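The typical ~4,800 "frames" per batch follow directly from the data config, assuming the reported counts are post-subsampling encoder frames: max_duration=200 seconds per batch, a 10 ms frame shift, and subsampling_factor=4 cap a batch at 5,000 encoder frames, with real batches landing slightly under that cap:

```python
# Worked arithmetic from the run config (not new measurements):
max_duration_s = 200       # max audio seconds per batch
frame_shift_s = 0.01       # 10 ms fbank frame shift
subsampling_factor = 4     # encoder downsampling

max_encoder_frames = max_duration_s / frame_shift_s / subsampling_factor
print(max_encoder_frames)  # 5000.0 -- the logged ~4800 sits just below
```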
], batch size: 33, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:24:27,435 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=16906.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:24:37,664 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.358e+02 1.908e+02 2.254e+02 2.695e+02 6.070e+02, threshold=4.507e+02, percent-clipped=3.0 +2023-04-26 14:24:41,870 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16929.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 14:24:50,586 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-26 14:25:00,558 INFO [finetune.py:976] (0/7) Epoch 3, batch 5500, loss[loss=0.1974, simple_loss=0.2618, pruned_loss=0.06651, over 4750.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2807, pruned_loss=0.08479, over 953234.68 frames. ], batch size: 27, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:25:07,400 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=16967.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 14:25:33,762 INFO [finetune.py:976] (0/7) Epoch 3, batch 5550, loss[loss=0.1967, simple_loss=0.2576, pruned_loss=0.06793, over 4791.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2813, pruned_loss=0.08483, over 954226.49 frames. ], batch size: 25, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:25:54,589 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.318e+02 1.795e+02 2.241e+02 2.790e+02 4.152e+02, threshold=4.483e+02, percent-clipped=0.0 +2023-04-26 14:26:02,722 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17027.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:26:35,601 INFO [finetune.py:976] (0/7) Epoch 3, batch 5600, loss[loss=0.2174, simple_loss=0.2779, pruned_loss=0.07846, over 4837.00 frames. ], tot_loss[loss=0.2298, simple_loss=0.286, pruned_loss=0.08675, over 955104.95 frames. ], batch size: 30, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:26:59,004 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=17075.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:27:38,803 INFO [finetune.py:976] (0/7) Epoch 3, batch 5650, loss[loss=0.2704, simple_loss=0.331, pruned_loss=0.1049, over 4743.00 frames. ], tot_loss[loss=0.2305, simple_loss=0.2875, pruned_loss=0.08671, over 953364.59 frames. ], batch size: 54, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:27:48,199 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1262, 1.3903, 1.3098, 1.6956, 1.4217, 1.8302, 1.3324, 3.3133], + device='cuda:0'), covar=tensor([0.0881, 0.1091, 0.1073, 0.1432, 0.0899, 0.0894, 0.1061, 0.0278], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 14:27:55,218 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.353e+02 1.900e+02 2.198e+02 2.594e+02 5.346e+02, threshold=4.396e+02, percent-clipped=1.0 +2023-04-26 14:28:26,725 INFO [finetune.py:976] (0/7) Epoch 3, batch 5700, loss[loss=0.2631, simple_loss=0.2854, pruned_loss=0.1204, over 4177.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2843, pruned_loss=0.08714, over 933771.66 frames. 
], batch size: 18, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:28:48,963 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-3.pt +2023-04-26 14:29:04,044 INFO [finetune.py:976] (0/7) Epoch 4, batch 0, loss[loss=0.3329, simple_loss=0.366, pruned_loss=0.1499, over 4837.00 frames. ], tot_loss[loss=0.3329, simple_loss=0.366, pruned_loss=0.1499, over 4837.00 frames. ], batch size: 44, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:29:04,045 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 14:29:17,919 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5179, 1.1338, 1.3256, 1.1761, 1.7828, 1.4269, 1.1320, 1.3011], + device='cuda:0'), covar=tensor([0.2171, 0.2070, 0.3176, 0.1985, 0.1370, 0.2085, 0.2808, 0.2753], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0334, 0.0348, 0.0309, 0.0345, 0.0353, 0.0314, 0.0351], + device='cuda:0'), out_proj_covar=tensor([6.7823e-05, 7.1689e-05, 7.5527e-05, 6.4718e-05, 7.3169e-05, 7.7032e-05, + 6.8408e-05, 7.5909e-05], device='cuda:0') +2023-04-26 14:29:26,719 INFO [finetune.py:1010] (0/7) Epoch 4, validation: loss=0.1686, simple_loss=0.2415, pruned_loss=0.04788, over 2265189.00 frames. +2023-04-26 14:29:26,720 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-26 14:29:31,446 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17189.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:29:52,774 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.687e+02 2.032e+02 2.492e+02 4.364e+02, threshold=4.064e+02, percent-clipped=0.0 +2023-04-26 14:29:53,448 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17224.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 14:29:54,745 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4065, 0.6413, 1.0121, 1.6892, 1.5473, 1.2658, 1.2699, 1.2800], + device='cuda:0'), covar=tensor([1.4617, 1.8717, 2.0593, 2.4188, 1.5876, 2.2678, 2.1923, 1.7590], + device='cuda:0'), in_proj_covar=tensor([0.0438, 0.0486, 0.0576, 0.0587, 0.0470, 0.0498, 0.0511, 0.0520], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 14:29:58,844 INFO [finetune.py:976] (0/7) Epoch 4, batch 50, loss[loss=0.2152, simple_loss=0.2748, pruned_loss=0.07787, over 4862.00 frames. ], tot_loss[loss=0.2315, simple_loss=0.2885, pruned_loss=0.08726, over 216437.10 frames. ], batch size: 34, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:30:11,287 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17250.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:30:16,193 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0576, 2.5504, 0.9053, 1.2971, 1.8973, 1.1471, 3.2275, 1.5208], + device='cuda:0'), covar=tensor([0.0694, 0.0733, 0.0940, 0.1226, 0.0532, 0.1066, 0.0297, 0.0727], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0071, 0.0053, 0.0050, 0.0055, 0.0055, 0.0084, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 14:30:18,608 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17262.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 14:30:31,853 INFO [finetune.py:976] (0/7) Epoch 4, batch 100, loss[loss=0.213, simple_loss=0.2721, pruned_loss=0.07689, over 4890.00 frames. 
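Two checkpoint flavours appear in this log: a rolling batch checkpoint every save_every_n=2000 batches (checkpoint-16000.pt above, checkpoint-18000.pt further down) and one per epoch (epoch-3.pt just above). A minimal sketch of what such a save plausibly bundles; the exact contents, e.g. sampler, grad-scaler and RNG state, are assumptions:

```python
import torch

def save_checkpoint(path, model, optimizer, scheduler, batch_idx_train):
    """Illustrative checkpoint writer; the real recipe likely stores
    more state than shown here."""
    torch.save(
        {
            "model": model.state_dict(),
            "optimizer": optimizer.state_dict(),
            "scheduler": scheduler.state_dict(),
            "batch_idx_train": batch_idx_train,
        },
        path,
    )

# e.g. save_checkpoint("pruned_transducer_stateless7_streaming/exp2/"
#                      "checkpoint-18000.pt", model, optim, sched, 18000)
```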
], tot_loss[loss=0.2263, simple_loss=0.2822, pruned_loss=0.08521, over 381602.47 frames. ], batch size: 32, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:30:58,841 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.312e+02 1.837e+02 2.342e+02 2.858e+02 5.078e+02, threshold=4.684e+02, percent-clipped=3.0 +2023-04-26 14:31:04,969 INFO [finetune.py:976] (0/7) Epoch 4, batch 150, loss[loss=0.228, simple_loss=0.271, pruned_loss=0.09255, over 4833.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2767, pruned_loss=0.08318, over 509577.93 frames. ], batch size: 25, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:31:24,756 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17362.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:31:33,377 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-26 14:31:38,037 INFO [finetune.py:976] (0/7) Epoch 4, batch 200, loss[loss=0.2313, simple_loss=0.2747, pruned_loss=0.094, over 4902.00 frames. ], tot_loss[loss=0.2198, simple_loss=0.2744, pruned_loss=0.08262, over 608557.49 frames. ], batch size: 36, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:31:38,164 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17383.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 14:32:02,344 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.95 vs. limit=5.0 +2023-04-26 14:32:03,452 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5693, 1.4426, 0.7176, 1.2369, 1.9031, 1.4075, 1.3418, 1.4476], + device='cuda:0'), covar=tensor([0.0587, 0.0470, 0.0465, 0.0636, 0.0287, 0.0598, 0.0596, 0.0678], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 14:32:05,046 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 2.027e+02 2.314e+02 2.814e+02 1.019e+03, threshold=4.629e+02, percent-clipped=4.0 +2023-04-26 14:32:05,189 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17423.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 14:32:11,142 INFO [finetune.py:976] (0/7) Epoch 4, batch 250, loss[loss=0.2762, simple_loss=0.3295, pruned_loss=0.1114, over 4815.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2799, pruned_loss=0.08385, over 686744.96 frames. ], batch size: 38, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:32:25,183 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17444.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 14:33:16,611 INFO [finetune.py:976] (0/7) Epoch 4, batch 300, loss[loss=0.2616, simple_loss=0.3152, pruned_loss=0.104, over 4817.00 frames. ], tot_loss[loss=0.2284, simple_loss=0.2848, pruned_loss=0.08596, over 748285.24 frames. ], batch size: 40, lr: 3.97e-03, grad_scale: 16.0 +2023-04-26 14:34:04,407 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.938e+02 2.299e+02 2.709e+02 4.777e+02, threshold=4.598e+02, percent-clipped=1.0 +2023-04-26 14:34:05,156 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17524.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 14:34:22,591 INFO [finetune.py:976] (0/7) Epoch 4, batch 350, loss[loss=0.2456, simple_loss=0.3086, pruned_loss=0.09132, over 4921.00 frames. 
], tot_loss[loss=0.2301, simple_loss=0.2874, pruned_loss=0.08642, over 794681.49 frames. ], batch size: 42, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:34:35,788 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17545.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:34:58,558 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7787, 0.9504, 1.1932, 1.3273, 1.3432, 1.5250, 1.2394, 1.2376],
+ device='cuda:0'), covar=tensor([0.9557, 1.4656, 1.2545, 1.1338, 1.3453, 2.1315, 1.5675, 1.3239],
+ device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0400, 0.0319, 0.0324, 0.0350, 0.0411, 0.0386, 0.0341],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 14:34:59,119 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17562.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 14:35:10,340 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=17572.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 14:35:17,455 INFO [finetune.py:976] (0/7) Epoch 4, batch 400, loss[loss=0.2319, simple_loss=0.2821, pruned_loss=0.09086, over 4760.00 frames. ], tot_loss[loss=0.23, simple_loss=0.2876, pruned_loss=0.08615, over 829888.78 frames. ], batch size: 26, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:35:24,474 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0
+2023-04-26 14:35:36,589 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5858, 1.9568, 1.5264, 1.1961, 1.2676, 1.2508, 1.4675, 1.1936],
+ device='cuda:0'), covar=tensor([0.2024, 0.1683, 0.2070, 0.2478, 0.3167, 0.2429, 0.1570, 0.2552],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0221, 0.0185, 0.0210, 0.0222, 0.0191, 0.0179, 0.0201],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 14:35:37,138 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=17610.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:35:45,055 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.309e+02 1.823e+02 2.162e+02 2.599e+02 8.047e+02, threshold=4.324e+02, percent-clipped=1.0
+2023-04-26 14:35:51,211 INFO [finetune.py:976] (0/7) Epoch 4, batch 450, loss[loss=0.2139, simple_loss=0.2751, pruned_loss=0.07635, over 4902.00 frames. ], tot_loss[loss=0.229, simple_loss=0.2866, pruned_loss=0.08574, over 858000.77 frames. ], batch size: 37, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:36:00,449 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0337, 2.4516, 0.8540, 1.4932, 1.4335, 1.9353, 1.6193, 0.8606],
+ device='cuda:0'), covar=tensor([0.1406, 0.1180, 0.1715, 0.1309, 0.1060, 0.0941, 0.1530, 0.1730],
+ device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0266, 0.0149, 0.0129, 0.0140, 0.0162, 0.0127, 0.0130],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 14:36:20,898 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3356, 3.0117, 0.9685, 1.6120, 1.7825, 2.2983, 1.8781, 1.0494],
+ device='cuda:0'), covar=tensor([0.1596, 0.1447, 0.2128, 0.1586, 0.1189, 0.1125, 0.1700, 0.1796],
+ device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0266, 0.0149, 0.0129, 0.0141, 0.0162, 0.0127, 0.0130],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 14:36:25,011 INFO [finetune.py:976] (0/7) Epoch 4, batch 500, loss[loss=0.2617, simple_loss=0.3075, pruned_loss=0.1079, over 4871.00 frames. ], tot_loss[loss=0.2268, simple_loss=0.2836, pruned_loss=0.08504, over 879749.04 frames. ], batch size: 34, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:36:35,722 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17700.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:36:45,484 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-04-26 14:36:48,886 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8355, 1.2545, 3.3191, 3.0939, 3.0019, 3.2039, 3.2281, 2.9505],
+ device='cuda:0'), covar=tensor([0.7162, 0.5490, 0.1488, 0.1960, 0.1447, 0.1859, 0.1502, 0.1742],
+ device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0314, 0.0433, 0.0435, 0.0370, 0.0421, 0.0331, 0.0389],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 14:36:49,484 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17718.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:36:52,485 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.778e+02 2.181e+02 2.733e+02 7.061e+02, threshold=4.362e+02, percent-clipped=3.0
+2023-04-26 14:36:56,851 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17730.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:36:58,596 INFO [finetune.py:976] (0/7) Epoch 4, batch 550, loss[loss=0.221, simple_loss=0.2776, pruned_loss=0.08219, over 4901.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2817, pruned_loss=0.08467, over 896149.66 frames. ], batch size: 37, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:37:02,831 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=17739.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 14:37:10,726 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.0227, 4.0414, 2.9423, 4.6690, 4.0316, 4.0801, 1.7830, 3.8940],
+ device='cuda:0'), covar=tensor([0.1667, 0.1122, 0.2694, 0.1435, 0.3174, 0.1707, 0.5797, 0.2058],
+ device='cuda:0'), in_proj_covar=tensor([0.0252, 0.0224, 0.0265, 0.0319, 0.0312, 0.0262, 0.0278, 0.0280],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 14:37:18,638 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17761.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:37:43,915 INFO [finetune.py:976] (0/7) Epoch 4, batch 600, loss[loss=0.1981, simple_loss=0.2548, pruned_loss=0.07069, over 4762.00 frames. ], tot_loss[loss=0.2251, simple_loss=0.2815, pruned_loss=0.08439, over 910176.47 frames. ], batch size: 26, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:37:49,320 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=17791.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:38:23,099 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.336e+02 1.937e+02 2.249e+02 2.763e+02 6.989e+02, threshold=4.497e+02, percent-clipped=2.0
+2023-04-26 14:38:34,870 INFO [finetune.py:976] (0/7) Epoch 4, batch 650, loss[loss=0.2274, simple_loss=0.2967, pruned_loss=0.07907, over 4803.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.2861, pruned_loss=0.08647, over 919891.76 frames. ], batch size: 45, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:38:47,242 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=17845.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:39:12,167 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5425, 1.7005, 1.5064, 1.7154, 1.5706, 1.7876, 1.6360, 1.5817],
+ device='cuda:0'), covar=tensor([1.0771, 1.9370, 1.6009, 1.3385, 1.6003, 2.5245, 2.0856, 1.7826],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0402, 0.0320, 0.0326, 0.0352, 0.0413, 0.0387, 0.0343],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 14:39:12,819 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.76 vs. limit=2.0
+2023-04-26 14:39:22,791 INFO [finetune.py:976] (0/7) Epoch 4, batch 700, loss[loss=0.2913, simple_loss=0.3114, pruned_loss=0.1356, over 3954.00 frames. ], tot_loss[loss=0.2309, simple_loss=0.2879, pruned_loss=0.08692, over 927903.05 frames. ], batch size: 17, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:39:28,920 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=17893.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:40:07,831 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.332e+02 1.898e+02 2.217e+02 2.664e+02 7.094e+02, threshold=4.434e+02, percent-clipped=3.0
+2023-04-26 14:40:19,659 INFO [finetune.py:976] (0/7) Epoch 4, batch 750, loss[loss=0.2452, simple_loss=0.3037, pruned_loss=0.09336, over 4885.00 frames. ], tot_loss[loss=0.2331, simple_loss=0.2902, pruned_loss=0.08797, over 936108.02 frames. ], batch size: 43, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:40:43,540 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3223, 1.4659, 1.3603, 1.5324, 1.4362, 1.6920, 1.5575, 1.5152],
+ device='cuda:0'), covar=tensor([0.9919, 1.7748, 1.5688, 1.2911, 1.5622, 2.4198, 1.8993, 1.6487],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0403, 0.0321, 0.0326, 0.0352, 0.0413, 0.0388, 0.0343],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 14:40:53,855 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=17960.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:41:24,410 INFO [finetune.py:976] (0/7) Epoch 4, batch 800, loss[loss=0.2508, simple_loss=0.3087, pruned_loss=0.0965, over 4889.00 frames. ], tot_loss[loss=0.2293, simple_loss=0.2868, pruned_loss=0.08585, over 939953.20 frames. ], batch size: 32, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:41:35,272 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-18000.pt
+2023-04-26 14:41:48,926 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18018.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:41:51,293 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18021.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:41:52,409 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.866e+02 2.231e+02 2.895e+02 6.246e+02, threshold=4.462e+02, percent-clipped=2.0
+2023-04-26 14:41:58,981 INFO [finetune.py:976] (0/7) Epoch 4, batch 850, loss[loss=0.2406, simple_loss=0.2975, pruned_loss=0.0919, over 4822.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.2855, pruned_loss=0.08594, over 942451.18 frames. ], batch size: 38, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:41:59,852 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-26 14:42:02,670 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18039.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 14:42:07,585 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9521, 1.4647, 1.3097, 1.8857, 2.1393, 1.8369, 1.7783, 1.4114],
+ device='cuda:0'), covar=tensor([0.2485, 0.1972, 0.2201, 0.2075, 0.1428, 0.2214, 0.2549, 0.2099],
+ device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0336, 0.0350, 0.0309, 0.0346, 0.0355, 0.0316, 0.0351],
+ device='cuda:0'), out_proj_covar=tensor([6.8481e-05, 7.2017e-05, 7.5896e-05, 6.4644e-05, 7.3500e-05, 7.7404e-05,
+ 6.8644e-05, 7.5854e-05], device='cuda:0')
+2023-04-26 14:42:12,381 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7037, 2.0058, 1.7409, 1.9258, 1.5952, 1.7055, 1.7970, 1.3194],
+ device='cuda:0'), covar=tensor([0.1822, 0.1404, 0.0981, 0.1313, 0.3251, 0.1330, 0.1805, 0.2587],
+ device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0336, 0.0246, 0.0309, 0.0323, 0.0288, 0.0277, 0.0301],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 14:42:13,526 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18056.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:42:20,043 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=18066.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:42:32,276 INFO [finetune.py:976] (0/7) Epoch 4, batch 900, loss[loss=0.194, simple_loss=0.2518, pruned_loss=0.06807, over 4840.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2819, pruned_loss=0.08432, over 945681.29 frames. ], batch size: 49, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:42:34,141 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18086.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:42:34,719 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=18087.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 14:42:58,116 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.952e+02 2.266e+02 2.638e+02 7.997e+02, threshold=4.531e+02, percent-clipped=4.0
+2023-04-26 14:43:05,176 INFO [finetune.py:976] (0/7) Epoch 4, batch 950, loss[loss=0.2435, simple_loss=0.2898, pruned_loss=0.09858, over 4823.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2815, pruned_loss=0.085, over 947400.59 frames. ], batch size: 33, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:43:11,634 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-04-26 14:43:26,734 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7672, 1.7051, 2.0468, 1.9998, 1.9384, 1.6841, 1.7993, 1.8798],
+ device='cuda:0'), covar=tensor([1.7611, 2.2685, 2.7308, 2.9017, 1.8945, 2.9458, 3.1840, 2.3306],
+ device='cuda:0'), in_proj_covar=tensor([0.0437, 0.0483, 0.0573, 0.0588, 0.0467, 0.0497, 0.0509, 0.0517],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 14:43:37,468 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7757, 1.3767, 1.5510, 2.2125, 2.1785, 1.6391, 1.2816, 1.7947],
+ device='cuda:0'), covar=tensor([0.1106, 0.1658, 0.1106, 0.0663, 0.0680, 0.1162, 0.1256, 0.0821],
+ device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0211, 0.0188, 0.0184, 0.0184, 0.0200, 0.0173, 0.0194],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 14:43:38,576 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18174.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:43:44,764 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18175.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:43:53,139 INFO [finetune.py:976] (0/7) Epoch 4, batch 1000, loss[loss=0.2552, simple_loss=0.3036, pruned_loss=0.1034, over 4904.00 frames. ], tot_loss[loss=0.2278, simple_loss=0.2843, pruned_loss=0.08564, over 951081.98 frames. ], batch size: 32, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:44:28,878 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9510, 0.5166, 0.8381, 0.5896, 1.1491, 0.8893, 0.7507, 0.8958],
+ device='cuda:0'), covar=tensor([0.1757, 0.1859, 0.2140, 0.1851, 0.1104, 0.1454, 0.1883, 0.1990],
+ device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0337, 0.0351, 0.0310, 0.0347, 0.0354, 0.0316, 0.0351],
+ device='cuda:0'), out_proj_covar=tensor([6.8392e-05, 7.2230e-05, 7.6017e-05, 6.4813e-05, 7.3599e-05, 7.7231e-05,
+ 6.8635e-05, 7.5812e-05], device='cuda:0')
+2023-04-26 14:44:30,497 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.905e+02 2.183e+02 2.540e+02 5.721e+02, threshold=4.367e+02, percent-clipped=1.0
+2023-04-26 14:44:38,563 INFO [finetune.py:976] (0/7) Epoch 4, batch 1050, loss[loss=0.2642, simple_loss=0.3141, pruned_loss=0.1071, over 4800.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.287, pruned_loss=0.08621, over 951958.65 frames. ], batch size: 45, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:44:39,864 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18235.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:44:40,476 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18236.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:44:43,481 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5957, 1.8130, 1.6197, 1.8121, 1.6120, 1.8815, 1.7075, 1.7019],
+ device='cuda:0'), covar=tensor([1.0258, 1.9192, 1.6476, 1.3365, 1.5524, 2.3123, 2.1307, 1.7635],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0402, 0.0320, 0.0325, 0.0350, 0.0412, 0.0386, 0.0341],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 14:45:12,450 INFO [finetune.py:976] (0/7) Epoch 4, batch 1100, loss[loss=0.2473, simple_loss=0.3067, pruned_loss=0.09392, over 4890.00 frames. ], tot_loss[loss=0.2311, simple_loss=0.2886, pruned_loss=0.08683, over 951846.06 frames. ], batch size: 36, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:45:38,608 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0
+2023-04-26 14:45:50,514 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18316.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:46:00,501 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.371e+02 2.039e+02 2.386e+02 2.843e+02 4.542e+02, threshold=4.773e+02, percent-clipped=1.0
+2023-04-26 14:46:08,162 INFO [finetune.py:976] (0/7) Epoch 4, batch 1150, loss[loss=0.2115, simple_loss=0.2721, pruned_loss=0.0755, over 4767.00 frames. ], tot_loss[loss=0.2301, simple_loss=0.2879, pruned_loss=0.08614, over 953211.81 frames. ], batch size: 26, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:46:23,816 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18356.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:46:52,607 INFO [finetune.py:976] (0/7) Epoch 4, batch 1200, loss[loss=0.209, simple_loss=0.2597, pruned_loss=0.07916, over 4817.00 frames. ], tot_loss[loss=0.2277, simple_loss=0.2852, pruned_loss=0.08512, over 953166.15 frames. ], batch size: 39, lr: 3.97e-03, grad_scale: 16.0
+2023-04-26 14:46:55,527 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18386.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:47:03,487 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0
+2023-04-26 14:47:08,298 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4506, 1.5781, 1.5192, 1.6532, 1.5685, 1.8245, 1.6055, 1.5698],
+ device='cuda:0'), covar=tensor([0.9854, 1.6634, 1.3146, 1.2128, 1.4648, 2.3076, 1.8762, 1.4817],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0405, 0.0322, 0.0329, 0.0354, 0.0416, 0.0389, 0.0345],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 14:47:18,672 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=18404.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:47:36,012 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.337e+02 1.888e+02 2.199e+02 2.566e+02 6.503e+02, threshold=4.398e+02, percent-clipped=1.0
+2023-04-26 14:47:42,651 INFO [finetune.py:976] (0/7) Epoch 4, batch 1250, loss[loss=0.1705, simple_loss=0.2295, pruned_loss=0.05571, over 4732.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2823, pruned_loss=0.08367, over 954147.46 frames. ], batch size: 23, lr: 3.97e-03, grad_scale: 32.0
+2023-04-26 14:47:43,319 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=18434.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:48:07,111 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6901, 1.7517, 1.8657, 2.0763, 2.0475, 1.5728, 1.2649, 1.7739],
+ device='cuda:0'), covar=tensor([0.0995, 0.1189, 0.0727, 0.0641, 0.0690, 0.1111, 0.1144, 0.0706],
+ device='cuda:0'), in_proj_covar=tensor([0.0209, 0.0210, 0.0187, 0.0183, 0.0183, 0.0200, 0.0173, 0.0193],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 14:48:15,942 INFO [finetune.py:976] (0/7) Epoch 4, batch 1300, loss[loss=0.2011, simple_loss=0.2431, pruned_loss=0.07955, over 4787.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2788, pruned_loss=0.08208, over 954781.47 frames. ], batch size: 26, lr: 3.97e-03, grad_scale: 32.0
+2023-04-26 14:48:21,243 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18490.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:48:34,008 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18508.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 14:48:43,005 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.221e+02 1.895e+02 2.151e+02 2.623e+02 4.474e+02, threshold=4.301e+02, percent-clipped=1.0
+2023-04-26 14:48:47,801 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18530.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:48:48,418 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18531.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:48:49,575 INFO [finetune.py:976] (0/7) Epoch 4, batch 1350, loss[loss=0.2192, simple_loss=0.2918, pruned_loss=0.07327, over 4821.00 frames. ], tot_loss[loss=0.2228, simple_loss=0.2798, pruned_loss=0.08292, over 955259.25 frames. ], batch size: 39, lr: 3.97e-03, grad_scale: 32.0
+2023-04-26 14:49:08,062 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18551.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:49:31,997 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18569.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 14:49:45,979 INFO [finetune.py:976] (0/7) Epoch 4, batch 1400, loss[loss=0.1831, simple_loss=0.2489, pruned_loss=0.05864, over 4791.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.2846, pruned_loss=0.08465, over 955995.85 frames. ], batch size: 26, lr: 3.97e-03, grad_scale: 32.0
+2023-04-26 14:49:46,676 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2442, 1.4491, 1.4763, 1.5673, 1.5574, 1.2506, 0.9514, 1.3830],
+ device='cuda:0'), covar=tensor([0.1079, 0.1233, 0.0891, 0.0749, 0.0785, 0.1078, 0.1201, 0.0780],
+ device='cuda:0'), in_proj_covar=tensor([0.0210, 0.0211, 0.0189, 0.0185, 0.0184, 0.0201, 0.0174, 0.0195],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 14:49:51,014 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-04-26 14:50:09,115 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18616.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:50:13,226 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.882e+02 2.176e+02 2.822e+02 8.396e+02, threshold=4.353e+02, percent-clipped=2.0
+2023-04-26 14:50:19,721 INFO [finetune.py:976] (0/7) Epoch 4, batch 1450, loss[loss=0.2123, simple_loss=0.2774, pruned_loss=0.07359, over 4926.00 frames. ], tot_loss[loss=0.2287, simple_loss=0.2866, pruned_loss=0.08541, over 956226.12 frames. ], batch size: 33, lr: 3.97e-03, grad_scale: 32.0
+2023-04-26 14:50:40,787 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=18664.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:50:52,614 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18674.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:51:01,392 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.31 vs. limit=5.0
+2023-04-26 14:51:03,065 INFO [finetune.py:976] (0/7) Epoch 4, batch 1500, loss[loss=0.2889, simple_loss=0.3229, pruned_loss=0.1275, over 4860.00 frames. ], tot_loss[loss=0.2312, simple_loss=0.2885, pruned_loss=0.08696, over 954880.48 frames. ], batch size: 31, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:51:23,436 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.74 vs. limit=2.0
+2023-04-26 14:51:57,996 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.902e+02 2.274e+02 2.776e+02 4.497e+02, threshold=4.548e+02, percent-clipped=1.0
+2023-04-26 14:52:09,749 INFO [finetune.py:976] (0/7) Epoch 4, batch 1550, loss[loss=0.2809, simple_loss=0.3275, pruned_loss=0.1172, over 4886.00 frames. ], tot_loss[loss=0.2306, simple_loss=0.2881, pruned_loss=0.08656, over 953333.26 frames. ], batch size: 32, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:52:11,114 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18735.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:52:53,651 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=2.06 vs. limit=2.0
+2023-04-26 14:53:12,329 INFO [finetune.py:976] (0/7) Epoch 4, batch 1600, loss[loss=0.1443, simple_loss=0.2195, pruned_loss=0.03455, over 4782.00 frames. ], tot_loss[loss=0.2281, simple_loss=0.2851, pruned_loss=0.0855, over 953482.40 frames. ], batch size: 29, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:53:12,478 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6171, 2.2962, 1.5499, 1.4089, 1.1874, 1.2321, 1.6156, 1.1579],
+ device='cuda:0'), covar=tensor([0.2237, 0.1727, 0.2220, 0.2698, 0.3460, 0.2510, 0.1605, 0.2815],
+ device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0220, 0.0183, 0.0209, 0.0220, 0.0189, 0.0176, 0.0199],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 14:53:25,286 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1361, 1.9887, 1.7119, 1.7467, 2.2422, 1.5928, 2.6745, 1.4314],
+ device='cuda:0'), covar=tensor([0.4342, 0.2121, 0.5771, 0.3638, 0.1893, 0.3013, 0.1646, 0.5215],
+ device='cuda:0'), in_proj_covar=tensor([0.0356, 0.0358, 0.0443, 0.0374, 0.0407, 0.0385, 0.0401, 0.0423],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 14:53:44,152 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 2.049e+02 2.445e+02 2.756e+02 4.210e+02, threshold=4.889e+02, percent-clipped=0.0
+2023-04-26 14:53:45,471 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3943, 1.7372, 1.5207, 2.1278, 1.8397, 2.0482, 1.4421, 4.2947],
+ device='cuda:0'), covar=tensor([0.0666, 0.0730, 0.0858, 0.1190, 0.0694, 0.0625, 0.0819, 0.0089],
+ device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0041, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 14:53:48,506 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18830.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:53:49,086 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=18831.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:53:50,211 INFO [finetune.py:976] (0/7) Epoch 4, batch 1650, loss[loss=0.2181, simple_loss=0.2782, pruned_loss=0.07898, over 4841.00 frames. ], tot_loss[loss=0.2238, simple_loss=0.2807, pruned_loss=0.08345, over 952959.12 frames. ], batch size: 30, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:53:50,922 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9970, 3.0324, 2.4079, 2.6076, 2.1743, 2.3705, 2.5837, 1.9568],
+ device='cuda:0'), covar=tensor([0.3080, 0.1488, 0.1029, 0.1690, 0.3373, 0.1720, 0.2224, 0.3682],
+ device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0335, 0.0246, 0.0310, 0.0324, 0.0289, 0.0277, 0.0301],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 14:53:54,038 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-04-26 14:53:58,584 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18846.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:54:11,008 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=18864.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 14:54:20,474 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=18878.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:54:21,068 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=18879.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:54:23,388 INFO [finetune.py:976] (0/7) Epoch 4, batch 1700, loss[loss=0.2589, simple_loss=0.3112, pruned_loss=0.1033, over 4905.00 frames. ], tot_loss[loss=0.2224, simple_loss=0.2795, pruned_loss=0.08267, over 955725.21 frames. ], batch size: 36, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:54:23,574 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.28 vs. limit=5.0
+2023-04-26 14:54:23,598 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.78 vs. limit=5.0
+2023-04-26 14:55:17,077 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.925e+02 2.287e+02 2.733e+02 5.363e+02, threshold=4.574e+02, percent-clipped=1.0
+2023-04-26 14:55:20,896 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. limit=5.0
+2023-04-26 14:55:21,944 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=18930.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:55:23,647 INFO [finetune.py:976] (0/7) Epoch 4, batch 1750, loss[loss=0.2855, simple_loss=0.3445, pruned_loss=0.1132, over 4867.00 frames. ], tot_loss[loss=0.2258, simple_loss=0.2832, pruned_loss=0.08419, over 955507.52 frames. ], batch size: 44, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:55:30,441 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8689, 1.3184, 4.9619, 4.6699, 4.3573, 4.6933, 4.3230, 4.4208],
+ device='cuda:0'), covar=tensor([0.6559, 0.5803, 0.0917, 0.1657, 0.1037, 0.1132, 0.1640, 0.1301],
+ device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0306, 0.0426, 0.0428, 0.0364, 0.0415, 0.0328, 0.0383],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 14:55:56,819 INFO [finetune.py:976] (0/7) Epoch 4, batch 1800, loss[loss=0.2567, simple_loss=0.3191, pruned_loss=0.09715, over 4808.00 frames. ], tot_loss[loss=0.2297, simple_loss=0.2876, pruned_loss=0.08591, over 958019.15 frames. ], batch size: 40, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:56:01,892 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=18991.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:56:44,253 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.520e+02 1.971e+02 2.444e+02 3.087e+02 4.882e+02, threshold=4.887e+02, percent-clipped=3.0
+2023-04-26 14:56:45,016 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9923, 2.5764, 2.1953, 2.4186, 1.8759, 2.0667, 2.2081, 1.7060],
+ device='cuda:0'), covar=tensor([0.2088, 0.1653, 0.0975, 0.1309, 0.3046, 0.1442, 0.1898, 0.2974],
+ device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0333, 0.0243, 0.0307, 0.0321, 0.0286, 0.0275, 0.0297],
+ device='cuda:0'), out_proj_covar=tensor([1.2823e-04, 1.3555e-04, 9.9207e-05, 1.2375e-04, 1.3244e-04, 1.1545e-04,
+ 1.1361e-04, 1.2035e-04], device='cuda:0')
+2023-04-26 14:56:49,156 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19030.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:56:50,954 INFO [finetune.py:976] (0/7) Epoch 4, batch 1850, loss[loss=0.2086, simple_loss=0.2761, pruned_loss=0.07048, over 4816.00 frames. ], tot_loss[loss=0.2292, simple_loss=0.2875, pruned_loss=0.08551, over 957690.80 frames. ], batch size: 38, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:57:13,079 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3433, 2.6113, 1.2369, 1.6307, 2.2962, 1.4536, 3.4173, 1.8764],
+ device='cuda:0'), covar=tensor([0.0605, 0.0552, 0.0740, 0.1236, 0.0464, 0.0940, 0.0313, 0.0623],
+ device='cuda:0'), in_proj_covar=tensor([0.0055, 0.0071, 0.0053, 0.0050, 0.0055, 0.0055, 0.0084, 0.0053],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008],
+ device='cuda:0')
+2023-04-26 14:57:24,084 INFO [finetune.py:976] (0/7) Epoch 4, batch 1900, loss[loss=0.2064, simple_loss=0.2812, pruned_loss=0.06582, over 4894.00 frames. ], tot_loss[loss=0.2295, simple_loss=0.288, pruned_loss=0.08553, over 957529.89 frames. ], batch size: 43, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:57:50,672 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.323e+02 1.880e+02 2.172e+02 2.548e+02 4.187e+02, threshold=4.344e+02, percent-clipped=0.0
+2023-04-26 14:57:54,928 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19129.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 14:57:57,174 INFO [finetune.py:976] (0/7) Epoch 4, batch 1950, loss[loss=0.2727, simple_loss=0.3118, pruned_loss=0.1168, over 4348.00 frames. ], tot_loss[loss=0.2269, simple_loss=0.2852, pruned_loss=0.08435, over 956238.95 frames. ], batch size: 66, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:58:05,510 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19146.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:58:17,022 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19164.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 14:58:30,342 INFO [finetune.py:976] (0/7) Epoch 4, batch 2000, loss[loss=0.2297, simple_loss=0.2849, pruned_loss=0.08726, over 4806.00 frames. ], tot_loss[loss=0.2245, simple_loss=0.2822, pruned_loss=0.08343, over 958063.32 frames. ], batch size: 51, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:58:40,472 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19190.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 14:58:48,408 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=19194.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 14:58:59,211 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0
+2023-04-26 14:59:10,843 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=19212.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 14:59:24,210 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.742e+02 2.062e+02 2.571e+02 4.374e+02, threshold=4.123e+02, percent-clipped=1.0
+2023-04-26 14:59:25,114 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-26 14:59:36,722 INFO [finetune.py:976] (0/7) Epoch 4, batch 2050, loss[loss=0.2264, simple_loss=0.2726, pruned_loss=0.09005, over 4919.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2778, pruned_loss=0.08145, over 959322.86 frames. ], batch size: 37, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 14:59:53,466 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6163, 3.7867, 0.8444, 2.0354, 2.1732, 2.4987, 2.3546, 1.0394],
+ device='cuda:0'), covar=tensor([0.1336, 0.0875, 0.2186, 0.1337, 0.0999, 0.1119, 0.1168, 0.2215],
+ device='cuda:0'), in_proj_covar=tensor([0.0122, 0.0264, 0.0148, 0.0129, 0.0139, 0.0161, 0.0126, 0.0129],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 15:00:03,711 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8166, 1.3986, 1.6537, 1.5642, 1.5192, 1.2893, 0.6076, 1.2816],
+ device='cuda:0'), covar=tensor([0.4771, 0.5305, 0.2540, 0.3448, 0.4317, 0.3743, 0.6138, 0.3691],
+ device='cuda:0'), in_proj_covar=tensor([0.0274, 0.0268, 0.0223, 0.0338, 0.0226, 0.0232, 0.0253, 0.0202],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 15:00:15,255 INFO [finetune.py:976] (0/7) Epoch 4, batch 2100, loss[loss=0.2086, simple_loss=0.2863, pruned_loss=0.06544, over 4907.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2783, pruned_loss=0.08195, over 958861.41 frames. ], batch size: 37, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:00:17,154 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19286.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:00:36,841 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6328, 1.4178, 1.6536, 1.9041, 1.7708, 1.5728, 1.6711, 1.6283],
+ device='cuda:0'), covar=tensor([1.3534, 1.7830, 1.9864, 2.2904, 1.5951, 2.2519, 2.3114, 1.8852],
+ device='cuda:0'), in_proj_covar=tensor([0.0433, 0.0478, 0.0566, 0.0585, 0.0464, 0.0492, 0.0504, 0.0512],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 15:00:45,252 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2294, 2.6797, 1.7334, 2.0868, 2.6284, 2.1156, 2.0968, 2.2408],
+ device='cuda:0'), covar=tensor([0.0481, 0.0326, 0.0296, 0.0494, 0.0216, 0.0498, 0.0509, 0.0542],
+ device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0025, 0.0031, 0.0022, 0.0031, 0.0030, 0.0032],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050],
+ device='cuda:0')
+2023-04-26 15:01:01,581 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.066e+02 1.821e+02 2.202e+02 2.671e+02 7.978e+02, threshold=4.404e+02, percent-clipped=3.0
+2023-04-26 15:01:07,019 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19330.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:01:14,462 INFO [finetune.py:976] (0/7) Epoch 4, batch 2150, loss[loss=0.2613, simple_loss=0.3188, pruned_loss=0.1019, over 4832.00 frames. ], tot_loss[loss=0.2266, simple_loss=0.2836, pruned_loss=0.08478, over 955423.40 frames. ], batch size: 49, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:01:28,491 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0
+2023-04-26 15:02:07,227 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=19378.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:02:16,423 INFO [finetune.py:976] (0/7) Epoch 4, batch 2200, loss[loss=0.2295, simple_loss=0.2858, pruned_loss=0.08667, over 4883.00 frames. ], tot_loss[loss=0.227, simple_loss=0.285, pruned_loss=0.0845, over 955409.63 frames. ], batch size: 32, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:02:30,485 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19394.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:02:46,214 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8100, 2.3415, 1.8765, 2.2129, 1.7311, 1.7996, 1.9666, 1.6064],
+ device='cuda:0'), covar=tensor([0.2336, 0.1437, 0.1112, 0.1357, 0.3232, 0.1510, 0.1945, 0.2685],
+ device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0330, 0.0241, 0.0305, 0.0319, 0.0284, 0.0273, 0.0295],
+ device='cuda:0'), out_proj_covar=tensor([1.2754e-04, 1.3449e-04, 9.8248e-05, 1.2290e-04, 1.3176e-04, 1.1488e-04,
+ 1.1264e-04, 1.1922e-04], device='cuda:0')
+2023-04-26 15:03:06,439 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.815e+02 2.166e+02 2.683e+02 4.330e+02, threshold=4.331e+02, percent-clipped=0.0
+2023-04-26 15:03:18,529 INFO [finetune.py:976] (0/7) Epoch 4, batch 2250, loss[loss=0.2718, simple_loss=0.3249, pruned_loss=0.1093, over 4727.00 frames. ], tot_loss[loss=0.2274, simple_loss=0.2854, pruned_loss=0.08466, over 953620.29 frames. ], batch size: 54, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:03:45,030 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. limit=5.0
+2023-04-26 15:03:51,635 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19455.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:04:21,506 INFO [finetune.py:976] (0/7) Epoch 4, batch 2300, loss[loss=0.2756, simple_loss=0.3298, pruned_loss=0.1107, over 4842.00 frames. ], tot_loss[loss=0.2263, simple_loss=0.285, pruned_loss=0.08378, over 954905.53 frames. ], batch size: 49, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:04:22,794 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19485.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 15:04:58,953 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.751e+02 2.123e+02 2.573e+02 6.035e+02, threshold=4.246e+02, percent-clipped=1.0
+2023-04-26 15:05:05,524 INFO [finetune.py:976] (0/7) Epoch 4, batch 2350, loss[loss=0.1542, simple_loss=0.211, pruned_loss=0.04873, over 4685.00 frames. ], tot_loss[loss=0.2233, simple_loss=0.2817, pruned_loss=0.08238, over 954884.13 frames. ], batch size: 23, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:05:39,105 INFO [finetune.py:976] (0/7) Epoch 4, batch 2400, loss[loss=0.2001, simple_loss=0.2542, pruned_loss=0.07305, over 4875.00 frames. ], tot_loss[loss=0.2201, simple_loss=0.2787, pruned_loss=0.08076, over 956048.74 frames. ], batch size: 34, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:05:39,856 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4233, 1.6045, 1.5871, 2.1688, 2.5078, 2.0328, 1.8602, 1.7715],
+ device='cuda:0'), covar=tensor([0.1849, 0.2334, 0.2467, 0.1944, 0.1369, 0.2272, 0.3112, 0.2148],
+ device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0338, 0.0352, 0.0310, 0.0349, 0.0355, 0.0315, 0.0354],
+ device='cuda:0'), out_proj_covar=tensor([6.8699e-05, 7.2593e-05, 7.6201e-05, 6.4888e-05, 7.4002e-05, 7.7417e-05,
+ 6.8485e-05, 7.6571e-05], device='cuda:0')
+2023-04-26 15:05:41,042 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19586.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:05:41,090 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5574, 1.2462, 1.5372, 1.9024, 1.7272, 1.5245, 1.5224, 1.6077],
+ device='cuda:0'), covar=tensor([1.4845, 2.0104, 2.3105, 2.2906, 1.6055, 2.3908, 2.5042, 1.9752],
+ device='cuda:0'), in_proj_covar=tensor([0.0434, 0.0478, 0.0566, 0.0585, 0.0464, 0.0492, 0.0504, 0.0512],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 15:06:03,663 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8659, 2.5200, 2.0860, 2.2927, 1.8799, 2.0546, 2.0550, 1.6492],
+ device='cuda:0'), covar=tensor([0.2470, 0.1619, 0.1142, 0.1601, 0.3277, 0.1713, 0.2461, 0.3259],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0327, 0.0239, 0.0302, 0.0317, 0.0281, 0.0271, 0.0293],
+ device='cuda:0'), out_proj_covar=tensor([1.2621e-04, 1.3329e-04, 9.7394e-05, 1.2173e-04, 1.3068e-04, 1.1350e-04,
+ 1.1196e-04, 1.1836e-04], device='cuda:0')
+2023-04-26 15:06:03,669 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=19618.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:06:06,614 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.798e+02 2.118e+02 2.540e+02 5.281e+02, threshold=4.235e+02, percent-clipped=1.0
+2023-04-26 15:06:12,782 INFO [finetune.py:976] (0/7) Epoch 4, batch 2450, loss[loss=0.2792, simple_loss=0.319, pruned_loss=0.1197, over 4811.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2752, pruned_loss=0.07931, over 955366.05 frames. ], batch size: 51, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:06:13,447 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=19634.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:06:34,056 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5191, 1.3702, 1.8685, 1.7576, 1.3911, 1.1597, 1.5838, 1.0727],
+ device='cuda:0'), covar=tensor([0.0923, 0.1154, 0.0644, 0.1034, 0.1146, 0.1523, 0.0897, 0.1059],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0079, 0.0077, 0.0071, 0.0083, 0.0098, 0.0087, 0.0081],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0004, 0.0003],
+ device='cuda:0')
+2023-04-26 15:06:44,233 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=19679.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:06:45,567 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.50 vs. limit=5.0
+2023-04-26 15:06:46,563 INFO [finetune.py:976] (0/7) Epoch 4, batch 2500, loss[loss=0.2039, simple_loss=0.2807, pruned_loss=0.06359, over 4830.00 frames. ], tot_loss[loss=0.2202, simple_loss=0.2781, pruned_loss=0.0812, over 953720.97 frames. ], batch size: 39, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:07:31,980 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.985e+02 2.362e+02 2.892e+02 4.639e+02, threshold=4.724e+02, percent-clipped=3.0
+2023-04-26 15:07:43,942 INFO [finetune.py:976] (0/7) Epoch 4, batch 2550, loss[loss=0.2114, simple_loss=0.2803, pruned_loss=0.07127, over 4788.00 frames. ], tot_loss[loss=0.2219, simple_loss=0.281, pruned_loss=0.08145, over 954145.61 frames. ], batch size: 29, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:08:07,128 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19750.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:08:50,442 INFO [finetune.py:976] (0/7) Epoch 4, batch 2600, loss[loss=0.241, simple_loss=0.2858, pruned_loss=0.09807, over 4746.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2823, pruned_loss=0.08206, over 954738.34 frames. ], batch size: 27, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:08:57,302 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=19785.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 15:09:23,390 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.319e+02 1.754e+02 2.126e+02 2.494e+02 3.908e+02, threshold=4.253e+02, percent-clipped=0.0
+2023-04-26 15:09:29,904 INFO [finetune.py:976] (0/7) Epoch 4, batch 2650, loss[loss=0.2294, simple_loss=0.2589, pruned_loss=0.09995, over 4091.00 frames. ], tot_loss[loss=0.2252, simple_loss=0.2839, pruned_loss=0.08323, over 952819.72 frames. ], batch size: 17, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:09:29,971 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=19833.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 15:09:31,819 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4462, 1.0362, 1.1380, 1.1323, 1.6553, 1.2924, 1.0347, 1.0997],
+ device='cuda:0'), covar=tensor([0.1702, 0.1830, 0.2084, 0.1553, 0.0913, 0.1766, 0.2514, 0.2160],
+ device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0338, 0.0353, 0.0310, 0.0351, 0.0354, 0.0317, 0.0354],
+ device='cuda:0'), out_proj_covar=tensor([6.8511e-05, 7.2458e-05, 7.6493e-05, 6.4975e-05, 7.4493e-05, 7.7221e-05,
+ 6.8809e-05, 7.6600e-05], device='cuda:0')
+2023-04-26 15:10:15,805 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0
+2023-04-26 15:10:35,942 INFO [finetune.py:976] (0/7) Epoch 4, batch 2700, loss[loss=0.17, simple_loss=0.2421, pruned_loss=0.04891, over 4862.00 frames. ], tot_loss[loss=0.2229, simple_loss=0.2817, pruned_loss=0.08205, over 953854.43 frames. ], batch size: 31, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:11:06,282 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0
+2023-04-26 15:11:15,314 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.807e+02 2.180e+02 2.734e+02 4.615e+02, threshold=4.361e+02, percent-clipped=4.0
+2023-04-26 15:11:21,337 INFO [finetune.py:976] (0/7) Epoch 4, batch 2750, loss[loss=0.2434, simple_loss=0.297, pruned_loss=0.09491, over 4830.00 frames. ], tot_loss[loss=0.2209, simple_loss=0.2787, pruned_loss=0.08161, over 952344.35 frames. ], batch size: 30, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:11:49,856 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=19974.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:11:55,248 INFO [finetune.py:976] (0/7) Epoch 4, batch 2800, loss[loss=0.1816, simple_loss=0.2371, pruned_loss=0.06303, over 4940.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2751, pruned_loss=0.08036, over 950720.66 frames. ], batch size: 33, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:11:58,497 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.85 vs. limit=5.0
+2023-04-26 15:12:06,129 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-20000.pt
+2023-04-26 15:12:20,810 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20020.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:12:23,470 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.778e+02 2.116e+02 2.453e+02 4.080e+02, threshold=4.233e+02, percent-clipped=0.0
+2023-04-26 15:12:28,410 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0
+2023-04-26 15:12:30,052 INFO [finetune.py:976] (0/7) Epoch 4, batch 2850, loss[loss=0.2313, simple_loss=0.2766, pruned_loss=0.09296, over 4912.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2747, pruned_loss=0.08055, over 952226.44 frames. ], batch size: 36, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:12:37,370 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0
+2023-04-26 15:12:40,885 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20050.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:13:02,633 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20081.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:13:03,754 INFO [finetune.py:976] (0/7) Epoch 4, batch 2900, loss[loss=0.2261, simple_loss=0.2744, pruned_loss=0.08889, over 4172.00 frames. ], tot_loss[loss=0.2205, simple_loss=0.2776, pruned_loss=0.08173, over 948508.86 frames. ], batch size: 18, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:13:07,511 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20089.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:13:13,374 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=20098.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:13:21,907 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20112.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:13:24,204 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0464, 1.1237, 1.3211, 1.5080, 1.4454, 1.5991, 1.3963, 1.4493],
+ device='cuda:0'), covar=tensor([0.8099, 1.3021, 1.1386, 1.0022, 1.2099, 1.8666, 1.3662, 1.1543],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0399, 0.0320, 0.0326, 0.0350, 0.0411, 0.0386, 0.0339],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 15:13:29,802 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.335e+02 1.818e+02 2.275e+02 2.799e+02 4.560e+02, threshold=4.551e+02, percent-clipped=2.0
+2023-04-26 15:13:31,764 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0
+2023-04-26 15:13:47,512 INFO [finetune.py:976] (0/7) Epoch 4, batch 2950, loss[loss=0.1825, simple_loss=0.2427, pruned_loss=0.06121, over 4709.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2806, pruned_loss=0.08276, over 948571.43 frames. ], batch size: 23, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:14:09,927 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20150.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:14:35,348 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20173.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:14:45,454 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0055, 1.3019, 1.1810, 1.5627, 1.4167, 1.4540, 1.2555, 2.5003],
+ device='cuda:0'), covar=tensor([0.0703, 0.0808, 0.0860, 0.1271, 0.0694, 0.0620, 0.0785, 0.0236],
+ device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 15:14:54,007 INFO [finetune.py:976] (0/7) Epoch 4, batch 3000, loss[loss=0.22, simple_loss=0.2815, pruned_loss=0.07919, over 4829.00 frames. ], tot_loss[loss=0.2248, simple_loss=0.2827, pruned_loss=0.0834, over 948468.63 frames. ], batch size: 47, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:14:54,008 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-26 15:15:01,861 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3492, 1.2815, 3.8287, 3.5350, 3.4579, 3.6989, 3.7477, 3.3864],
+ device='cuda:0'), covar=tensor([0.7835, 0.5912, 0.1208, 0.2157, 0.1387, 0.1434, 0.0793, 0.1729],
+ device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0312, 0.0430, 0.0437, 0.0369, 0.0422, 0.0331, 0.0391],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 15:15:03,009 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6960, 1.7855, 1.7309, 1.3807, 1.9271, 1.5453, 2.3254, 1.5648],
+ device='cuda:0'), covar=tensor([0.3642, 0.1706, 0.5488, 0.2834, 0.1609, 0.2311, 0.1617, 0.4975],
+ device='cuda:0'), in_proj_covar=tensor([0.0354, 0.0359, 0.0444, 0.0373, 0.0405, 0.0382, 0.0398, 0.0423],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 15:15:10,818 INFO [finetune.py:1010] (0/7) Epoch 4, validation: loss=0.1632, simple_loss=0.2363, pruned_loss=0.04509, over 2265189.00 frames.
+2023-04-26 15:15:10,818 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB
+2023-04-26 15:15:32,392 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6929, 2.2054, 1.7775, 2.0915, 1.6126, 1.7765, 1.9044, 1.4859],
+ device='cuda:0'), covar=tensor([0.2453, 0.1606, 0.1191, 0.1478, 0.3636, 0.1624, 0.2271, 0.3182],
+ device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0332, 0.0242, 0.0307, 0.0322, 0.0286, 0.0273, 0.0296],
+ device='cuda:0'), out_proj_covar=tensor([1.2780e-04, 1.3510e-04, 9.8689e-05, 1.2361e-04, 1.3290e-04, 1.1546e-04,
+ 1.1266e-04, 1.1964e-04], device='cuda:0')
+2023-04-26 15:15:35,620 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20204.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:15:43,503 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6265, 1.8342, 1.8815, 2.2245, 2.0408, 2.1790, 1.5946, 4.5839],
+ device='cuda:0'), covar=tensor([0.0661, 0.0699, 0.0708, 0.1081, 0.0555, 0.0647, 0.0728, 0.0121],
+ device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 15:15:47,601 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.225e+02 1.833e+02 2.218e+02 2.696e+02 4.809e+02, threshold=4.435e+02, percent-clipped=1.0
+2023-04-26 15:16:05,213 INFO [finetune.py:976] (0/7) Epoch 4, batch 3050, loss[loss=0.2112, simple_loss=0.2754, pruned_loss=0.07347, over 4907.00 frames. ], tot_loss[loss=0.2255, simple_loss=0.2837, pruned_loss=0.08366, over 947771.75 frames. ], batch size: 38, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:16:38,371 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20265.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:16:49,430 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20274.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:16:50,066 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0946, 1.9352, 2.1817, 2.4079, 2.3150, 1.8428, 1.5660, 1.9629],
+ device='cuda:0'), covar=tensor([0.1039, 0.1091, 0.0673, 0.0653, 0.0727, 0.1121, 0.1099, 0.0772],
+ device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0208, 0.0186, 0.0181, 0.0181, 0.0198, 0.0170, 0.0192],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 15:16:55,395 INFO [finetune.py:976] (0/7) Epoch 4, batch 3100, loss[loss=0.2038, simple_loss=0.2686, pruned_loss=0.06952, over 4817.00 frames. ], tot_loss[loss=0.2214, simple_loss=0.2797, pruned_loss=0.08152, over 949593.23 frames. ], batch size: 39, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:16:59,697 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0
+2023-04-26 15:17:19,875 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1617, 1.5061, 1.9933, 2.4557, 1.9511, 1.5170, 1.1709, 1.8467],
+ device='cuda:0'), covar=tensor([0.4103, 0.5224, 0.2263, 0.3609, 0.4382, 0.3802, 0.6413, 0.3732],
+ device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0265, 0.0222, 0.0335, 0.0224, 0.0231, 0.0251, 0.0201],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 15:17:21,570 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=20322.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:17:22,123 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.364e+02 1.832e+02 2.083e+02 2.367e+02 4.071e+02, threshold=4.166e+02, percent-clipped=0.0
+2023-04-26 15:17:25,413 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0
+2023-04-26 15:17:28,237 INFO [finetune.py:976] (0/7) Epoch 4, batch 3150, loss[loss=0.1668, simple_loss=0.2305, pruned_loss=0.05157, over 4758.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.277, pruned_loss=0.08116, over 950982.17 frames. ], batch size: 26, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:17:41,648 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0043, 1.4347, 1.8545, 2.2518, 1.6856, 1.4363, 1.0353, 1.5906],
+ device='cuda:0'), covar=tensor([0.4593, 0.5613, 0.2604, 0.3537, 0.5010, 0.3930, 0.6743, 0.3870],
+ device='cuda:0'), in_proj_covar=tensor([0.0272, 0.0265, 0.0222, 0.0335, 0.0225, 0.0231, 0.0252, 0.0201],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 15:17:57,217 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20376.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:18:01,415 INFO [finetune.py:976] (0/7) Epoch 4, batch 3200, loss[loss=0.1999, simple_loss=0.2615, pruned_loss=0.06917, over 4804.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2742, pruned_loss=0.08014, over 952123.83 frames. ], batch size: 45, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:18:04,536 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20388.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:18:08,099 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0
+2023-04-26 15:18:10,315 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5724, 1.4923, 1.9335, 1.8296, 1.4538, 1.1619, 1.5998, 1.0864],
+ device='cuda:0'), covar=tensor([0.0804, 0.0763, 0.0557, 0.0821, 0.0961, 0.1537, 0.0790, 0.1030],
+ device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0078, 0.0077, 0.0070, 0.0082, 0.0098, 0.0086, 0.0080],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003],
+ device='cuda:0')
+2023-04-26 15:18:16,570 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7155, 3.7658, 0.9487, 1.9635, 2.1419, 2.4956, 2.1628, 1.0973],
+ device='cuda:0'), covar=tensor([0.1322, 0.0891, 0.2142, 0.1471, 0.1084, 0.1164, 0.1477, 0.1909],
+ device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0262, 0.0146, 0.0128, 0.0138, 0.0160, 0.0124, 0.0127],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 15:18:25,053 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2949, 1.2194, 3.8162, 3.5480, 3.4080, 3.6207, 3.6899, 3.3799],
+ device='cuda:0'), covar=tensor([0.7046, 0.5827, 0.1291, 0.2001, 0.1276, 0.2191, 0.1360, 0.1493],
+ device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0308, 0.0423, 0.0431, 0.0364, 0.0417, 0.0325, 0.0385],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 15:18:25,086 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20418.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:18:27,371 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5162, 3.1306, 1.1676, 1.8909, 1.9085, 2.4696, 2.0445, 1.3691],
+ device='cuda:0'), covar=tensor([0.1189, 0.0832, 0.1708, 0.1244, 0.0974, 0.0896, 0.1209, 0.1873],
+ device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0262, 0.0147, 0.0129, 0.0138, 0.0160, 0.0124, 0.0128],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 15:18:28,510 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.902e+02 2.131e+02 2.743e+02 4.123e+02, threshold=4.262e+02, percent-clipped=0.0
+2023-04-26 15:18:34,645 INFO [finetune.py:976] (0/7) Epoch 4, batch 3250, loss[loss=0.2204, simple_loss=0.2809, pruned_loss=0.07992, over 4925.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2746, pruned_loss=0.08002, over 952186.02 frames. ], batch size: 33, lr: 3.96e-03, grad_scale: 64.0
+2023-04-26 15:18:42,470 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20445.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:18:45,893 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20449.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:19:04,239 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20468.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:19:11,497 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20479.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:19:13,809 INFO [finetune.py:976] (0/7) Epoch 4, batch 3300, loss[loss=0.2152, simple_loss=0.2693, pruned_loss=0.08056, over 4782.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.278, pruned_loss=0.08123, over 951342.85 frames. ], batch size: 26, lr: 3.96e-03, grad_scale: 32.0
+2023-04-26 15:19:19,987 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2036, 2.6696, 1.0471, 1.4833, 1.9908, 1.2423, 3.6644, 1.8803],
+ device='cuda:0'), covar=tensor([0.0728, 0.0794, 0.0872, 0.1337, 0.0571, 0.1137, 0.0303, 0.0633],
+ device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0071, 0.0053, 0.0050, 0.0055, 0.0055, 0.0083, 0.0053],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008],
+ device='cuda:0')
+2023-04-26 15:19:32,154 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8451, 2.4306, 2.0006, 2.2453, 1.6173, 1.8934, 2.1814, 1.6148],
+ device='cuda:0'), covar=tensor([0.2525, 0.1421, 0.1024, 0.1522, 0.3814, 0.1490, 0.2162, 0.2939],
+ device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0335, 0.0244, 0.0309, 0.0324, 0.0288, 0.0275, 0.0298],
+ device='cuda:0'), out_proj_covar=tensor([1.2867e-04, 1.3648e-04, 9.9372e-05, 1.2476e-04, 1.3356e-04, 1.1630e-04,
+ 1.1330e-04, 1.2064e-04], device='cuda:0')
+2023-04-26 15:19:47,321 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.894e+02 2.274e+02 2.897e+02 7.817e+02, threshold=4.548e+02, percent-clipped=5.0
+2023-04-26 15:19:58,667 INFO [finetune.py:976] (0/7) Epoch 4, batch 3350, loss[loss=0.2112, simple_loss=0.2672, pruned_loss=0.07759, over 4900.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.28, pruned_loss=0.08135, over 951379.66 frames. ], batch size: 32, lr: 3.96e-03, grad_scale: 16.0
+2023-04-26 15:20:39,499 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20560.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:20:55,812 INFO [finetune.py:976] (0/7) Epoch 4, batch 3400, loss[loss=0.2241, simple_loss=0.2805, pruned_loss=0.08383, over 4922.00 frames. ], tot_loss[loss=0.222, simple_loss=0.2812, pruned_loss=0.08143, over 951324.97 frames. ], batch size: 42, lr: 3.96e-03, grad_scale: 16.0
+2023-04-26 15:21:04,538 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=20588.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:21:26,650 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-26 15:21:29,474 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.806e+02 2.102e+02 2.435e+02 3.817e+02, threshold=4.203e+02, percent-clipped=0.0
+2023-04-26 15:21:34,360 INFO [finetune.py:976] (0/7) Epoch 4, batch 3450, loss[loss=0.219, simple_loss=0.277, pruned_loss=0.08053, over 4890.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2792, pruned_loss=0.07993, over 952099.68 frames. ], batch size: 35, lr: 3.96e-03, grad_scale: 16.0
+2023-04-26 15:21:44,623 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=20649.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:21:58,163 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3919, 3.2330, 0.9773, 1.6857, 1.8395, 2.2529, 1.9024, 0.9863],
+ device='cuda:0'), covar=tensor([0.1371, 0.0942, 0.1937, 0.1417, 0.1074, 0.1115, 0.1468, 0.2051],
+ device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0261, 0.0146, 0.0128, 0.0137, 0.0160, 0.0124, 0.0127],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 15:22:19,756 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1402, 1.4195, 1.3311, 1.8070, 1.5610, 1.8166, 1.3469, 3.1197],
+ device='cuda:0'), covar=tensor([0.0703, 0.0815, 0.0842, 0.1227, 0.0696, 0.0500, 0.0811, 0.0171],
+ device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 15:22:19,762 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20676.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:22:23,907 INFO [finetune.py:976] (0/7) Epoch 4, batch 3500, loss[loss=0.1629, simple_loss=0.2314, pruned_loss=0.04717, over 4750.00 frames. ], tot_loss[loss=0.2192, simple_loss=0.278, pruned_loss=0.08021, over 950931.55 frames. ], batch size: 27, lr: 3.96e-03, grad_scale: 16.0
+2023-04-26 15:22:58,017 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=20724.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:22:58,547 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.751e+02 2.119e+02 2.772e+02 4.921e+02, threshold=4.238e+02, percent-clipped=1.0
+2023-04-26 15:23:03,441 INFO [finetune.py:976] (0/7) Epoch 4, batch 3550, loss[loss=0.2187, simple_loss=0.2779, pruned_loss=0.07975, over 4744.00 frames. ], tot_loss[loss=0.2182, simple_loss=0.2762, pruned_loss=0.0801, over 952675.30 frames. ], batch size: 26, lr: 3.96e-03, grad_scale: 16.0
+2023-04-26 15:23:15,955 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20744.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:23:16,609 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20745.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:23:48,058 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20768.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:23:58,622 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20774.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 15:24:04,022 INFO [finetune.py:976] (0/7) Epoch 4, batch 3600, loss[loss=0.2405, simple_loss=0.2937, pruned_loss=0.09363, over 4856.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2744, pruned_loss=0.07944, over 954180.32 frames.
], batch size: 44, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:24:10,255 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=20793.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:24:10,292 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1204, 2.7461, 1.0448, 1.3724, 2.0220, 1.3696, 3.6084, 1.9463], + device='cuda:0'), covar=tensor([0.0694, 0.0684, 0.0885, 0.1249, 0.0517, 0.0946, 0.0202, 0.0570], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0071, 0.0053, 0.0050, 0.0055, 0.0055, 0.0083, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 15:24:31,213 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=20816.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:24:38,615 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.726e+02 2.072e+02 2.545e+02 4.959e+02, threshold=4.144e+02, percent-clipped=1.0 +2023-04-26 15:24:43,526 INFO [finetune.py:976] (0/7) Epoch 4, batch 3650, loss[loss=0.2673, simple_loss=0.3198, pruned_loss=0.1074, over 4743.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2773, pruned_loss=0.08101, over 952822.97 frames. ], batch size: 59, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:25:00,490 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=20860.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:25:09,711 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1986, 1.3754, 1.3045, 1.9073, 1.5856, 1.8855, 1.3288, 4.1640], + device='cuda:0'), covar=tensor([0.0856, 0.1077, 0.1094, 0.1431, 0.0876, 0.0770, 0.1027, 0.0190], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 15:25:15,092 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1313, 1.0213, 1.2684, 1.1929, 1.0854, 0.9012, 1.0213, 0.5146], + device='cuda:0'), covar=tensor([0.0692, 0.0802, 0.0705, 0.0920, 0.0953, 0.1662, 0.0610, 0.1362], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0078, 0.0077, 0.0070, 0.0082, 0.0098, 0.0086, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 15:25:16,801 INFO [finetune.py:976] (0/7) Epoch 4, batch 3700, loss[loss=0.2229, simple_loss=0.2811, pruned_loss=0.08231, over 4774.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2818, pruned_loss=0.08282, over 955312.39 frames. 
], batch size: 26, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:25:32,425 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=20908.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:25:49,668 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.4508, 4.5068, 3.2418, 5.1603, 4.5317, 4.4182, 2.1157, 4.4070], + device='cuda:0'), covar=tensor([0.1555, 0.0903, 0.2864, 0.0922, 0.2823, 0.1622, 0.5527, 0.1864], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0221, 0.0259, 0.0313, 0.0308, 0.0256, 0.0275, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 15:25:50,202 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.325e+02 1.979e+02 2.308e+02 2.768e+02 4.690e+02, threshold=4.617e+02, percent-clipped=4.0 +2023-04-26 15:26:00,827 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5605, 3.1750, 0.9006, 1.7175, 1.7883, 2.3095, 1.8348, 1.0592], + device='cuda:0'), covar=tensor([0.1174, 0.0914, 0.1984, 0.1331, 0.1033, 0.1009, 0.1424, 0.1992], + device='cuda:0'), in_proj_covar=tensor([0.0120, 0.0261, 0.0146, 0.0127, 0.0138, 0.0160, 0.0124, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 15:26:01,984 INFO [finetune.py:976] (0/7) Epoch 4, batch 3750, loss[loss=0.165, simple_loss=0.2193, pruned_loss=0.05531, over 3997.00 frames. ], tot_loss[loss=0.2244, simple_loss=0.283, pruned_loss=0.08294, over 953104.14 frames. ], batch size: 17, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:26:14,223 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=20944.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:27:02,460 INFO [finetune.py:976] (0/7) Epoch 4, batch 3800, loss[loss=0.2449, simple_loss=0.2975, pruned_loss=0.09617, over 4157.00 frames. ], tot_loss[loss=0.2231, simple_loss=0.2822, pruned_loss=0.08198, over 952208.40 frames. ], batch size: 65, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:27:46,871 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.703e+02 2.110e+02 2.594e+02 5.214e+02, threshold=4.219e+02, percent-clipped=1.0 +2023-04-26 15:28:05,809 INFO [finetune.py:976] (0/7) Epoch 4, batch 3850, loss[loss=0.2369, simple_loss=0.3011, pruned_loss=0.0864, over 4867.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2805, pruned_loss=0.0809, over 952831.87 frames. ], batch size: 34, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:28:18,862 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21044.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:28:29,647 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21052.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:28:54,706 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21074.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:29:11,949 INFO [finetune.py:976] (0/7) Epoch 4, batch 3900, loss[loss=0.2289, simple_loss=0.2821, pruned_loss=0.08786, over 4934.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2788, pruned_loss=0.08091, over 954483.03 frames. 
], batch size: 38, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:29:17,339 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1341, 1.9817, 2.0917, 2.5161, 2.3237, 1.9154, 1.7306, 2.0157], + device='cuda:0'), covar=tensor([0.1031, 0.1085, 0.0770, 0.0655, 0.0697, 0.1005, 0.1049, 0.0745], + device='cuda:0'), in_proj_covar=tensor([0.0208, 0.0208, 0.0186, 0.0181, 0.0181, 0.0198, 0.0171, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 15:29:18,470 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=21092.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:29:31,294 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21113.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:29:34,794 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21118.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:29:37,127 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=21122.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:29:38,426 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2323, 2.0928, 2.4981, 2.6497, 1.9765, 1.6752, 2.2429, 1.1758], + device='cuda:0'), covar=tensor([0.0860, 0.1191, 0.0668, 0.1207, 0.1235, 0.1635, 0.1154, 0.1403], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0078, 0.0076, 0.0070, 0.0082, 0.0097, 0.0086, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 15:29:38,885 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.817e+02 2.053e+02 2.461e+02 7.075e+02, threshold=4.105e+02, percent-clipped=2.0 +2023-04-26 15:29:43,719 INFO [finetune.py:976] (0/7) Epoch 4, batch 3950, loss[loss=0.2293, simple_loss=0.2747, pruned_loss=0.09193, over 4821.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.275, pruned_loss=0.07937, over 953202.95 frames. ], batch size: 25, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:30:14,120 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21179.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:30:16,453 INFO [finetune.py:976] (0/7) Epoch 4, batch 4000, loss[loss=0.199, simple_loss=0.2635, pruned_loss=0.06724, over 4820.00 frames. ], tot_loss[loss=0.2164, simple_loss=0.2745, pruned_loss=0.07916, over 954169.21 frames. ], batch size: 45, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:30:35,264 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2943, 1.4801, 1.4333, 1.9475, 1.7065, 1.9849, 1.5044, 4.1671], + device='cuda:0'), covar=tensor([0.0723, 0.0839, 0.0885, 0.1285, 0.0693, 0.0688, 0.0804, 0.0130], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 15:30:45,241 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.385e+02 1.961e+02 2.328e+02 2.721e+02 5.358e+02, threshold=4.656e+02, percent-clipped=3.0 +2023-04-26 15:30:50,169 INFO [finetune.py:976] (0/7) Epoch 4, batch 4050, loss[loss=0.2743, simple_loss=0.3211, pruned_loss=0.1138, over 4862.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.279, pruned_loss=0.08094, over 954690.05 frames. 
], batch size: 31, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:30:53,314 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2983, 2.2311, 1.7904, 1.8757, 2.2276, 1.8119, 2.7505, 1.5559], + device='cuda:0'), covar=tensor([0.3847, 0.1604, 0.4538, 0.3436, 0.2070, 0.2455, 0.1612, 0.4409], + device='cuda:0'), in_proj_covar=tensor([0.0353, 0.0355, 0.0439, 0.0370, 0.0401, 0.0380, 0.0397, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 15:30:58,864 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21244.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:31:01,155 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2338, 1.5648, 1.5469, 1.7628, 1.7221, 2.0184, 1.4833, 3.7579], + device='cuda:0'), covar=tensor([0.0761, 0.0845, 0.0875, 0.1335, 0.0698, 0.0640, 0.0802, 0.0127], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 15:31:22,973 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9011, 2.2720, 0.9073, 1.1586, 1.4882, 1.1813, 2.5099, 1.3885], + device='cuda:0'), covar=tensor([0.0726, 0.0550, 0.0729, 0.1352, 0.0485, 0.0991, 0.0315, 0.0709], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0070, 0.0053, 0.0049, 0.0054, 0.0055, 0.0083, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 15:31:23,490 INFO [finetune.py:976] (0/7) Epoch 4, batch 4100, loss[loss=0.2011, simple_loss=0.2644, pruned_loss=0.06893, over 4761.00 frames. ], tot_loss[loss=0.2226, simple_loss=0.2821, pruned_loss=0.08151, over 955301.22 frames. ], batch size: 26, lr: 3.96e-03, grad_scale: 16.0 +2023-04-26 15:31:29,562 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=21292.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:31:43,155 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21312.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:31:50,917 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.937e+02 2.313e+02 2.737e+02 5.004e+02, threshold=4.625e+02, percent-clipped=1.0 +2023-04-26 15:31:52,891 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21328.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:31:56,304 INFO [finetune.py:976] (0/7) Epoch 4, batch 4150, loss[loss=0.1888, simple_loss=0.265, pruned_loss=0.0563, over 4844.00 frames. ], tot_loss[loss=0.2237, simple_loss=0.2832, pruned_loss=0.0821, over 955253.87 frames. 
], batch size: 49, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:32:40,033 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21373.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:32:44,268 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1272, 2.4291, 0.9343, 1.3342, 1.8102, 1.2496, 3.1046, 1.6941], + device='cuda:0'), covar=tensor([0.0659, 0.0566, 0.0785, 0.1221, 0.0494, 0.0980, 0.0288, 0.0603], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0071, 0.0053, 0.0050, 0.0054, 0.0055, 0.0083, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 15:32:46,018 INFO [finetune.py:976] (0/7) Epoch 4, batch 4200, loss[loss=0.2532, simple_loss=0.3082, pruned_loss=0.09911, over 4887.00 frames. ], tot_loss[loss=0.2216, simple_loss=0.2819, pruned_loss=0.08063, over 955164.60 frames. ], batch size: 35, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:32:50,259 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21389.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:32:57,360 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0162, 0.7365, 0.8521, 0.6812, 1.1279, 0.9132, 0.7756, 0.8539], + device='cuda:0'), covar=tensor([0.1624, 0.1511, 0.1900, 0.1688, 0.0950, 0.1372, 0.1838, 0.2035], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0339, 0.0355, 0.0311, 0.0352, 0.0354, 0.0316, 0.0354], + device='cuda:0'), out_proj_covar=tensor([6.8503e-05, 7.2756e-05, 7.7082e-05, 6.5119e-05, 7.4812e-05, 7.7229e-05, + 6.8645e-05, 7.6494e-05], device='cuda:0') +2023-04-26 15:32:59,832 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1043, 0.6279, 0.9176, 0.7057, 1.2668, 0.9833, 0.8130, 0.9800], + device='cuda:0'), covar=tensor([0.2306, 0.2053, 0.2514, 0.1995, 0.1272, 0.2111, 0.2894, 0.2544], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0339, 0.0355, 0.0311, 0.0352, 0.0354, 0.0316, 0.0354], + device='cuda:0'), out_proj_covar=tensor([6.8451e-05, 7.2701e-05, 7.7041e-05, 6.5102e-05, 7.4754e-05, 7.7177e-05, + 6.8614e-05, 7.6484e-05], device='cuda:0') +2023-04-26 15:33:21,599 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21408.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:33:42,773 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.624e+02 1.995e+02 2.490e+02 4.928e+02, threshold=3.989e+02, percent-clipped=1.0 +2023-04-26 15:33:52,192 INFO [finetune.py:976] (0/7) Epoch 4, batch 4250, loss[loss=0.1855, simple_loss=0.2499, pruned_loss=0.06051, over 4803.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2797, pruned_loss=0.08042, over 953148.26 frames. ], batch size: 51, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:34:11,318 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.75 vs. limit=5.0 +2023-04-26 15:34:30,938 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21474.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:34:41,942 INFO [finetune.py:976] (0/7) Epoch 4, batch 4300, loss[loss=0.1795, simple_loss=0.237, pruned_loss=0.061, over 4822.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2755, pruned_loss=0.07851, over 953123.33 frames. 
], batch size: 41, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:34:42,071 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9416, 2.1238, 1.8419, 1.6613, 2.1580, 1.6782, 2.7373, 1.6092], + device='cuda:0'), covar=tensor([0.4766, 0.1852, 0.5339, 0.3453, 0.1931, 0.2807, 0.1547, 0.5052], + device='cuda:0'), in_proj_covar=tensor([0.0360, 0.0361, 0.0445, 0.0374, 0.0407, 0.0387, 0.0401, 0.0428], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 15:35:06,264 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0688, 4.4125, 1.0738, 2.2024, 2.7457, 2.6334, 2.6963, 1.0335], + device='cuda:0'), covar=tensor([0.1190, 0.0790, 0.1977, 0.1266, 0.0854, 0.1208, 0.1259, 0.1976], + device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0260, 0.0146, 0.0128, 0.0137, 0.0159, 0.0124, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 15:35:27,095 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.707e+02 2.018e+02 2.403e+02 4.975e+02, threshold=4.035e+02, percent-clipped=3.0 +2023-04-26 15:35:31,900 INFO [finetune.py:976] (0/7) Epoch 4, batch 4350, loss[loss=0.2266, simple_loss=0.2834, pruned_loss=0.0849, over 4834.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2724, pruned_loss=0.07738, over 953446.52 frames. ], batch size: 47, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:35:43,244 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6657, 2.1410, 1.8698, 2.1313, 1.6495, 1.7382, 1.9259, 1.4489], + device='cuda:0'), covar=tensor([0.2421, 0.1638, 0.1089, 0.1503, 0.3974, 0.1561, 0.1944, 0.3153], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0332, 0.0241, 0.0304, 0.0321, 0.0285, 0.0273, 0.0296], + device='cuda:0'), out_proj_covar=tensor([1.2680e-04, 1.3540e-04, 9.7948e-05, 1.2264e-04, 1.3234e-04, 1.1513e-04, + 1.1237e-04, 1.1948e-04], device='cuda:0') +2023-04-26 15:36:37,729 INFO [finetune.py:976] (0/7) Epoch 4, batch 4400, loss[loss=0.2051, simple_loss=0.2638, pruned_loss=0.0732, over 4783.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.275, pruned_loss=0.07924, over 951568.88 frames. ], batch size: 26, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:36:51,075 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21595.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:37:09,365 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21613.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:37:12,903 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1808, 2.9689, 0.9848, 1.4486, 2.3375, 1.4692, 4.2133, 1.9047], + device='cuda:0'), covar=tensor([0.0767, 0.0855, 0.0967, 0.1366, 0.0594, 0.1052, 0.0252, 0.0702], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0071, 0.0053, 0.0050, 0.0054, 0.0055, 0.0083, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 15:37:17,113 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.164e+02 1.848e+02 2.167e+02 2.564e+02 6.946e+02, threshold=4.335e+02, percent-clipped=3.0 +2023-04-26 15:37:22,022 INFO [finetune.py:976] (0/7) Epoch 4, batch 4450, loss[loss=0.2023, simple_loss=0.2655, pruned_loss=0.06955, over 4870.00 frames. ], tot_loss[loss=0.2197, simple_loss=0.2785, pruned_loss=0.08041, over 951203.28 frames. 
], batch size: 31, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:37:27,053 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4715, 1.7325, 1.6567, 2.0825, 1.8023, 2.0100, 1.6729, 3.4396], + device='cuda:0'), covar=tensor([0.0688, 0.0746, 0.0774, 0.1076, 0.0619, 0.0556, 0.0719, 0.0159], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0047, 0.0042, 0.0041, 0.0041, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 15:37:36,740 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21656.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:37:46,451 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21668.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:37:48,263 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4202, 1.1999, 1.6370, 1.5317, 1.2809, 1.1585, 1.3100, 0.8165], + device='cuda:0'), covar=tensor([0.0657, 0.0945, 0.0569, 0.0810, 0.1032, 0.1537, 0.0700, 0.1088], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0077, 0.0075, 0.0070, 0.0081, 0.0097, 0.0085, 0.0078], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 15:37:50,666 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21674.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:37:56,116 INFO [finetune.py:976] (0/7) Epoch 4, batch 4500, loss[loss=0.2256, simple_loss=0.2898, pruned_loss=0.08071, over 4835.00 frames. ], tot_loss[loss=0.2204, simple_loss=0.2795, pruned_loss=0.08063, over 950043.47 frames. ], batch size: 30, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:37:56,775 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21684.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:38:02,519 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-26 15:38:10,175 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4224, 2.3623, 1.8353, 2.0682, 2.4072, 1.8093, 3.1178, 1.6802], + device='cuda:0'), covar=tensor([0.4600, 0.2187, 0.4911, 0.4174, 0.2134, 0.3089, 0.1920, 0.5137], + device='cuda:0'), in_proj_covar=tensor([0.0355, 0.0356, 0.0439, 0.0369, 0.0401, 0.0382, 0.0395, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 15:38:11,961 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21708.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:38:21,105 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-26 15:38:24,949 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.777e+02 2.074e+02 2.624e+02 7.543e+02, threshold=4.148e+02, percent-clipped=2.0 +2023-04-26 15:38:29,906 INFO [finetune.py:976] (0/7) Epoch 4, batch 4550, loss[loss=0.2484, simple_loss=0.3042, pruned_loss=0.09629, over 4707.00 frames. ], tot_loss[loss=0.2211, simple_loss=0.2809, pruned_loss=0.08067, over 952113.40 frames. 
], batch size: 54, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:38:40,467 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3231, 1.3032, 1.3820, 1.5883, 1.6020, 1.2735, 0.9519, 1.3953], + device='cuda:0'), covar=tensor([0.0896, 0.1189, 0.0671, 0.0602, 0.0568, 0.0909, 0.0988, 0.0601], + device='cuda:0'), in_proj_covar=tensor([0.0205, 0.0205, 0.0183, 0.0179, 0.0179, 0.0195, 0.0168, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 15:38:40,644 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0 +2023-04-26 15:38:44,654 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=21756.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:38:49,065 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21763.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:38:58,639 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21774.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:39:02,403 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7670, 0.9680, 1.3083, 1.5226, 1.4926, 1.6577, 1.3371, 1.3725], + device='cuda:0'), covar=tensor([0.7759, 1.2917, 1.0915, 0.9576, 1.1720, 1.9154, 1.3183, 1.0799], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0399, 0.0319, 0.0327, 0.0349, 0.0413, 0.0385, 0.0339], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 15:39:04,081 INFO [finetune.py:976] (0/7) Epoch 4, batch 4600, loss[loss=0.2021, simple_loss=0.2537, pruned_loss=0.07528, over 4886.00 frames. ], tot_loss[loss=0.2206, simple_loss=0.2807, pruned_loss=0.08018, over 955528.35 frames. ], batch size: 32, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:39:29,984 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=21822.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:39:31,712 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21824.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:39:37,419 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.091e+02 1.786e+02 2.120e+02 2.552e+02 5.015e+02, threshold=4.240e+02, percent-clipped=1.0 +2023-04-26 15:39:48,032 INFO [finetune.py:976] (0/7) Epoch 4, batch 4650, loss[loss=0.1824, simple_loss=0.2439, pruned_loss=0.06051, over 4765.00 frames. ], tot_loss[loss=0.2193, simple_loss=0.2784, pruned_loss=0.0801, over 957140.06 frames. ], batch size: 54, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:40:04,080 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21850.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:40:33,081 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1950, 1.5042, 1.3802, 1.7423, 1.5538, 1.9032, 1.3365, 3.2957], + device='cuda:0'), covar=tensor([0.0746, 0.0826, 0.0838, 0.1316, 0.0702, 0.0570, 0.0793, 0.0193], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0047, 0.0042, 0.0041, 0.0041, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 15:40:49,528 INFO [finetune.py:976] (0/7) Epoch 4, batch 4700, loss[loss=0.2191, simple_loss=0.2682, pruned_loss=0.08495, over 4807.00 frames. 
], tot_loss[loss=0.2177, simple_loss=0.2764, pruned_loss=0.07949, over 959210.26 frames. ], batch size: 38, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:41:11,517 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3746, 3.3889, 0.8987, 1.7733, 1.8451, 2.3281, 1.9703, 0.9038], + device='cuda:0'), covar=tensor([0.1435, 0.1012, 0.2180, 0.1403, 0.1163, 0.1089, 0.1559, 0.2067], + device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0261, 0.0146, 0.0127, 0.0137, 0.0159, 0.0124, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 15:41:12,780 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=21911.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:41:21,740 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.822e+02 2.098e+02 2.414e+02 4.975e+02, threshold=4.197e+02, percent-clipped=3.0 +2023-04-26 15:41:28,264 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-26 15:41:28,679 INFO [finetune.py:976] (0/7) Epoch 4, batch 4750, loss[loss=0.2118, simple_loss=0.2725, pruned_loss=0.07557, over 4761.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2749, pruned_loss=0.07916, over 958627.22 frames. ], batch size: 28, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:41:40,248 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21951.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:41:50,658 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21968.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:41:51,775 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=21969.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:41:52,416 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=21970.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:42:02,274 INFO [finetune.py:976] (0/7) Epoch 4, batch 4800, loss[loss=0.1992, simple_loss=0.2588, pruned_loss=0.06986, over 4784.00 frames. ], tot_loss[loss=0.2173, simple_loss=0.2764, pruned_loss=0.07908, over 958834.86 frames. ], batch size: 26, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:42:03,022 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=21984.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:42:10,243 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6962, 3.7314, 2.7747, 4.3513, 3.8157, 3.8090, 1.7707, 3.6777], + device='cuda:0'), covar=tensor([0.1854, 0.1215, 0.2968, 0.1608, 0.3953, 0.1806, 0.5335, 0.2413], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0223, 0.0260, 0.0314, 0.0310, 0.0258, 0.0276, 0.0279], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 15:42:18,908 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-22000.pt +2023-04-26 15:42:39,319 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22016.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:42:41,822 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. 
limit=5.0 +2023-04-26 15:42:50,414 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.864e+02 2.140e+02 2.595e+02 5.123e+02, threshold=4.279e+02, percent-clipped=1.0 +2023-04-26 15:42:50,650 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.40 vs. limit=5.0 +2023-04-26 15:42:54,226 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22031.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:42:55,318 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22032.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:42:56,369 INFO [finetune.py:976] (0/7) Epoch 4, batch 4850, loss[loss=0.2511, simple_loss=0.3062, pruned_loss=0.09801, over 4931.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2804, pruned_loss=0.08045, over 959407.16 frames. ], batch size: 38, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:43:45,552 INFO [finetune.py:976] (0/7) Epoch 4, batch 4900, loss[loss=0.2215, simple_loss=0.2788, pruned_loss=0.08214, over 4750.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2803, pruned_loss=0.08013, over 957594.48 frames. ], batch size: 27, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:44:15,730 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22119.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:44:19,288 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.858e+02 2.268e+02 2.601e+02 5.071e+02, threshold=4.535e+02, percent-clipped=2.0 +2023-04-26 15:44:22,980 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22130.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:44:24,745 INFO [finetune.py:976] (0/7) Epoch 4, batch 4950, loss[loss=0.2035, simple_loss=0.2803, pruned_loss=0.06338, over 4912.00 frames. ], tot_loss[loss=0.2223, simple_loss=0.2825, pruned_loss=0.08107, over 958054.88 frames. ], batch size: 38, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:44:53,682 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22176.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:44:58,358 INFO [finetune.py:976] (0/7) Epoch 4, batch 5000, loss[loss=0.1611, simple_loss=0.2289, pruned_loss=0.04662, over 4940.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2794, pruned_loss=0.07941, over 955846.31 frames. ], batch size: 33, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:45:03,882 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22191.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:45:15,014 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22206.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:45:20,521 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22215.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:45:31,709 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.348e+02 1.793e+02 1.980e+02 2.407e+02 3.890e+02, threshold=3.961e+02, percent-clipped=0.0 +2023-04-26 15:45:42,551 INFO [finetune.py:976] (0/7) Epoch 4, batch 5050, loss[loss=0.215, simple_loss=0.2666, pruned_loss=0.08171, over 4831.00 frames. ], tot_loss[loss=0.2171, simple_loss=0.2768, pruned_loss=0.07873, over 954206.34 frames. 
], batch size: 33, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:45:45,104 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:45:45,117 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22237.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:46:06,546 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22251.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:46:28,128 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22269.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:46:32,403 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22276.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:46:42,370 INFO [finetune.py:976] (0/7) Epoch 4, batch 5100, loss[loss=0.2045, simple_loss=0.2722, pruned_loss=0.06842, over 4771.00 frames. ], tot_loss[loss=0.2142, simple_loss=0.2734, pruned_loss=0.07754, over 955158.26 frames. ], batch size: 26, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:46:50,209 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4014, 2.4669, 2.0698, 2.2513, 2.4836, 1.9875, 3.4033, 1.9577], + device='cuda:0'), covar=tensor([0.5133, 0.2428, 0.4678, 0.3908, 0.2465, 0.3218, 0.1621, 0.4826], + device='cuda:0'), in_proj_covar=tensor([0.0355, 0.0356, 0.0438, 0.0370, 0.0402, 0.0384, 0.0398, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 15:46:52,058 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22298.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:46:53,111 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22299.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:46:57,723 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-26 15:47:06,009 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22317.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:47:10,807 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.090e+02 1.713e+02 2.102e+02 2.590e+02 4.893e+02, threshold=4.205e+02, percent-clipped=2.0 +2023-04-26 15:47:11,497 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22326.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:47:15,718 INFO [finetune.py:976] (0/7) Epoch 4, batch 5150, loss[loss=0.2235, simple_loss=0.2824, pruned_loss=0.08233, over 4846.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2726, pruned_loss=0.07802, over 953188.05 frames. ], batch size: 47, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:47:16,590 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.43 vs. 
limit=5.0 +2023-04-26 15:47:52,650 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9732, 1.3937, 1.8002, 2.1895, 1.7408, 1.3947, 1.1504, 1.6038], + device='cuda:0'), covar=tensor([0.3803, 0.4461, 0.2059, 0.3279, 0.3863, 0.3298, 0.6153, 0.3440], + device='cuda:0'), in_proj_covar=tensor([0.0273, 0.0264, 0.0221, 0.0333, 0.0223, 0.0230, 0.0249, 0.0199], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 15:47:54,331 INFO [finetune.py:976] (0/7) Epoch 4, batch 5200, loss[loss=0.2391, simple_loss=0.304, pruned_loss=0.08713, over 4860.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2754, pruned_loss=0.0791, over 954193.56 frames. ], batch size: 44, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:48:01,215 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22385.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:48:05,962 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7187, 1.6805, 1.7833, 2.0960, 2.1512, 1.6981, 1.4352, 1.8063], + device='cuda:0'), covar=tensor([0.0979, 0.1140, 0.0825, 0.0596, 0.0604, 0.0911, 0.0977, 0.0657], + device='cuda:0'), in_proj_covar=tensor([0.0207, 0.0208, 0.0186, 0.0182, 0.0181, 0.0197, 0.0170, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 15:48:22,632 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6892, 1.2744, 1.2374, 1.3685, 1.9677, 1.5798, 1.2108, 1.2000], + device='cuda:0'), covar=tensor([0.1741, 0.1682, 0.2560, 0.1462, 0.0749, 0.1801, 0.2382, 0.2257], + device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0335, 0.0353, 0.0310, 0.0349, 0.0350, 0.0316, 0.0354], + device='cuda:0'), out_proj_covar=tensor([6.8212e-05, 7.1884e-05, 7.6489e-05, 6.4778e-05, 7.3894e-05, 7.6299e-05, + 6.8505e-05, 7.6373e-05], device='cuda:0') +2023-04-26 15:48:30,904 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22419.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:48:34,446 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.341e+02 1.984e+02 2.270e+02 2.624e+02 8.595e+02, threshold=4.540e+02, percent-clipped=4.0 +2023-04-26 15:48:39,404 INFO [finetune.py:976] (0/7) Epoch 4, batch 5250, loss[loss=0.2785, simple_loss=0.3185, pruned_loss=0.1192, over 4833.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.278, pruned_loss=0.07947, over 954668.69 frames. ], batch size: 30, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:48:47,957 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22446.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:49:01,857 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-26 15:49:09,417 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22467.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:49:25,135 INFO [finetune.py:976] (0/7) Epoch 4, batch 5300, loss[loss=0.2152, simple_loss=0.2753, pruned_loss=0.07748, over 4851.00 frames. ], tot_loss[loss=0.2207, simple_loss=0.2807, pruned_loss=0.08037, over 954759.11 frames. 
], batch size: 31, lr: 3.95e-03, grad_scale: 16.0 +2023-04-26 15:49:32,895 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22486.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:49:56,902 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22506.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:50:15,639 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.302e+02 1.836e+02 2.188e+02 2.701e+02 5.070e+02, threshold=4.376e+02, percent-clipped=2.0 +2023-04-26 15:50:19,961 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22532.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:50:20,501 INFO [finetune.py:976] (0/7) Epoch 4, batch 5350, loss[loss=0.2446, simple_loss=0.296, pruned_loss=0.09661, over 4895.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2802, pruned_loss=0.07976, over 956317.33 frames. ], batch size: 37, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:50:33,845 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22554.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:50:46,631 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22571.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:50:53,880 INFO [finetune.py:976] (0/7) Epoch 4, batch 5400, loss[loss=0.2129, simple_loss=0.2775, pruned_loss=0.07413, over 4778.00 frames. ], tot_loss[loss=0.2187, simple_loss=0.2786, pruned_loss=0.07943, over 957349.40 frames. ], batch size: 29, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:50:59,980 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22593.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:51:06,185 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3115, 0.5166, 1.0643, 1.7019, 1.5136, 1.2771, 1.2227, 1.2575], + device='cuda:0'), covar=tensor([1.1331, 1.4911, 1.5927, 1.7832, 1.1879, 1.6995, 1.7503, 1.5288], + device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0468, 0.0555, 0.0575, 0.0459, 0.0485, 0.0497, 0.0499], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 15:51:19,864 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2280, 1.6816, 1.5769, 2.0155, 1.7442, 2.1818, 1.4934, 3.8960], + device='cuda:0'), covar=tensor([0.0722, 0.0754, 0.0854, 0.1196, 0.0691, 0.0524, 0.0795, 0.0132], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 15:51:38,540 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.846e+02 2.198e+02 2.575e+02 4.196e+02, threshold=4.395e+02, percent-clipped=1.0 +2023-04-26 15:51:39,225 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22626.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:51:42,918 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=2.04 vs. limit=2.0 +2023-04-26 15:51:47,647 INFO [finetune.py:976] (0/7) Epoch 4, batch 5450, loss[loss=0.2324, simple_loss=0.2756, pruned_loss=0.09456, over 4807.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2761, pruned_loss=0.07887, over 956314.79 frames. 
], batch size: 51, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:51:51,453 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22639.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:52:39,581 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22674.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:52:50,911 INFO [finetune.py:976] (0/7) Epoch 4, batch 5500, loss[loss=0.2112, simple_loss=0.2691, pruned_loss=0.07669, over 4788.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2738, pruned_loss=0.07821, over 954649.27 frames. ], batch size: 29, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:53:12,617 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22700.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 15:53:34,588 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 2.045e+02 2.332e+02 2.799e+02 5.603e+02, threshold=4.665e+02, percent-clipped=2.0 +2023-04-26 15:53:40,512 INFO [finetune.py:976] (0/7) Epoch 4, batch 5550, loss[loss=0.2773, simple_loss=0.302, pruned_loss=0.1263, over 4251.00 frames. ], tot_loss[loss=0.2179, simple_loss=0.2765, pruned_loss=0.07968, over 955815.82 frames. ], batch size: 18, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:53:45,457 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22741.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:53:52,332 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-26 15:54:11,070 INFO [finetune.py:976] (0/7) Epoch 4, batch 5600, loss[loss=0.2727, simple_loss=0.319, pruned_loss=0.1132, over 4879.00 frames. ], tot_loss[loss=0.2221, simple_loss=0.2813, pruned_loss=0.08143, over 956766.08 frames. ], batch size: 32, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:54:12,910 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22786.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:54:18,071 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22794.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:54:18,715 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-26 15:54:22,263 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7979, 2.6287, 1.8105, 1.9642, 1.3500, 1.4234, 2.0158, 1.4540], + device='cuda:0'), covar=tensor([0.1877, 0.1851, 0.1945, 0.2175, 0.2894, 0.2181, 0.1333, 0.2400], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0221, 0.0180, 0.0209, 0.0219, 0.0189, 0.0174, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 15:54:37,298 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.226e+02 1.792e+02 2.075e+02 2.605e+02 4.713e+02, threshold=4.150e+02, percent-clipped=1.0 +2023-04-26 15:54:37,398 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=22825.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:54:41,494 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22832.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:54:42,026 INFO [finetune.py:976] (0/7) Epoch 4, batch 5650, loss[loss=0.2046, simple_loss=0.2813, pruned_loss=0.06391, over 4920.00 frames. ], tot_loss[loss=0.2232, simple_loss=0.2834, pruned_loss=0.08144, over 955850.98 frames. 
], batch size: 36, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:54:42,645 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22834.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:54:55,178 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22855.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:54:57,540 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3288, 2.8994, 2.6240, 2.6918, 2.2372, 2.4629, 2.5399, 2.1026], + device='cuda:0'), covar=tensor([0.1930, 0.1177, 0.0790, 0.1091, 0.2681, 0.1103, 0.1764, 0.2500], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0328, 0.0238, 0.0301, 0.0319, 0.0284, 0.0271, 0.0292], + device='cuda:0'), out_proj_covar=tensor([1.2579e-04, 1.3343e-04, 9.7066e-05, 1.2124e-04, 1.3151e-04, 1.1454e-04, + 1.1168e-04, 1.1808e-04], device='cuda:0') +2023-04-26 15:55:15,948 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22871.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:55:27,436 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22880.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:55:29,215 INFO [finetune.py:976] (0/7) Epoch 4, batch 5700, loss[loss=0.2046, simple_loss=0.2412, pruned_loss=0.08399, over 4434.00 frames. ], tot_loss[loss=0.2203, simple_loss=0.2791, pruned_loss=0.08077, over 939220.82 frames. ], batch size: 19, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:55:31,111 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=22886.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:55:40,625 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=22893.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:56:02,572 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-4.pt +2023-04-26 15:56:19,328 INFO [finetune.py:976] (0/7) Epoch 5, batch 0, loss[loss=0.1979, simple_loss=0.2729, pruned_loss=0.06144, over 4861.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2729, pruned_loss=0.06144, over 4861.00 frames. ], batch size: 44, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:56:19,329 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 15:56:28,913 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2842, 1.1799, 1.5742, 1.4361, 1.2293, 1.0460, 1.3300, 0.9158], + device='cuda:0'), covar=tensor([0.0917, 0.0885, 0.0607, 0.0782, 0.0985, 0.1399, 0.0690, 0.0892], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0077, 0.0075, 0.0069, 0.0080, 0.0096, 0.0083, 0.0078], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 15:56:30,075 INFO [finetune.py:1010] (0/7) Epoch 5, validation: loss=0.1632, simple_loss=0.2369, pruned_loss=0.04473, over 2265189.00 frames. +2023-04-26 15:56:30,076 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-26 15:56:35,032 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22919.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:56:38,530 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.609e+02 1.975e+02 2.369e+02 5.506e+02, threshold=3.950e+02, percent-clipped=1.0 +2023-04-26 15:56:44,817 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. 
limit=2.0 +2023-04-26 15:56:48,354 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=22941.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:56:52,743 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7932, 2.5881, 1.7926, 1.7514, 1.3230, 1.4335, 1.8643, 1.3057], + device='cuda:0'), covar=tensor([0.1992, 0.1639, 0.1945, 0.2283, 0.2998, 0.2278, 0.1449, 0.2455], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0221, 0.0180, 0.0210, 0.0219, 0.0189, 0.0174, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 15:56:53,303 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0796, 2.3870, 1.0641, 1.3783, 1.7283, 1.3893, 3.0619, 1.7647], + device='cuda:0'), covar=tensor([0.0630, 0.0554, 0.0744, 0.1312, 0.0540, 0.0950, 0.0237, 0.0618], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0070, 0.0052, 0.0049, 0.0054, 0.0054, 0.0082, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 15:57:02,124 INFO [finetune.py:976] (0/7) Epoch 5, batch 50, loss[loss=0.2544, simple_loss=0.3072, pruned_loss=0.1008, over 4836.00 frames. ], tot_loss[loss=0.2208, simple_loss=0.2815, pruned_loss=0.08002, over 217060.31 frames. ], batch size: 49, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:57:30,148 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=22995.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 15:57:46,131 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-26 15:57:52,330 INFO [finetune.py:976] (0/7) Epoch 5, batch 100, loss[loss=0.2229, simple_loss=0.2851, pruned_loss=0.08037, over 4898.00 frames. ], tot_loss[loss=0.2166, simple_loss=0.2753, pruned_loss=0.07895, over 378863.31 frames. ], batch size: 35, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:58:02,669 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.276e+01 1.854e+02 2.138e+02 2.662e+02 6.636e+02, threshold=4.277e+02, percent-clipped=4.0 +2023-04-26 15:58:12,531 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23041.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:58:25,556 INFO [finetune.py:976] (0/7) Epoch 5, batch 150, loss[loss=0.1888, simple_loss=0.2518, pruned_loss=0.06285, over 4919.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2698, pruned_loss=0.0769, over 507612.06 frames. 
], batch size: 37, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:58:44,614 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=23089.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:58:48,350 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0218, 2.4598, 1.0586, 1.3720, 1.8240, 1.3269, 3.0348, 1.6404], + device='cuda:0'), covar=tensor([0.0640, 0.0592, 0.0775, 0.1205, 0.0489, 0.0941, 0.0262, 0.0623], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0070, 0.0052, 0.0049, 0.0053, 0.0054, 0.0082, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 15:58:57,365 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23108.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:58:59,559 INFO [finetune.py:976] (0/7) Epoch 5, batch 200, loss[loss=0.2109, simple_loss=0.2662, pruned_loss=0.07787, over 4850.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2707, pruned_loss=0.07715, over 608510.19 frames. ], batch size: 49, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:59:10,071 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.706e+02 2.083e+02 2.458e+02 7.322e+02, threshold=4.166e+02, percent-clipped=1.0 +2023-04-26 15:59:25,392 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23150.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:59:33,030 INFO [finetune.py:976] (0/7) Epoch 5, batch 250, loss[loss=0.2063, simple_loss=0.2544, pruned_loss=0.07916, over 4779.00 frames. ], tot_loss[loss=0.2172, simple_loss=0.2753, pruned_loss=0.07956, over 685038.04 frames. ], batch size: 26, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 15:59:38,557 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23169.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 15:59:43,649 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1307, 1.4628, 1.4314, 1.6944, 1.5438, 1.8008, 1.3279, 3.4997], + device='cuda:0'), covar=tensor([0.0739, 0.0840, 0.0872, 0.1283, 0.0728, 0.0616, 0.0827, 0.0185], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 15:59:47,774 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23181.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:00:06,065 INFO [finetune.py:976] (0/7) Epoch 5, batch 300, loss[loss=0.2477, simple_loss=0.3099, pruned_loss=0.09278, over 4906.00 frames. ], tot_loss[loss=0.2213, simple_loss=0.28, pruned_loss=0.08125, over 747970.52 frames. ], batch size: 35, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 16:00:10,765 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23217.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:00:15,563 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.925e+02 2.300e+02 2.815e+02 4.609e+02, threshold=4.600e+02, percent-clipped=3.0 +2023-04-26 16:00:18,570 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23229.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:00:55,705 INFO [finetune.py:976] (0/7) Epoch 5, batch 350, loss[loss=0.1613, simple_loss=0.2277, pruned_loss=0.04752, over 4817.00 frames. 
], tot_loss[loss=0.224, simple_loss=0.2823, pruned_loss=0.08282, over 791888.53 frames. ], batch size: 25, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 16:01:18,177 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23278.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:01:31,692 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23290.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:01:40,985 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23295.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:01:55,886 INFO [finetune.py:976] (0/7) Epoch 5, batch 400, loss[loss=0.2151, simple_loss=0.2664, pruned_loss=0.08186, over 4918.00 frames. ], tot_loss[loss=0.2217, simple_loss=0.2813, pruned_loss=0.08102, over 829870.56 frames. ], batch size: 38, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 16:02:05,328 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.686e+02 2.112e+02 2.575e+02 6.256e+02, threshold=4.223e+02, percent-clipped=1.0 +2023-04-26 16:02:07,202 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7242, 1.7537, 1.7865, 1.9480, 1.7250, 1.9866, 1.9194, 1.8765], + device='cuda:0'), covar=tensor([0.6513, 1.2424, 1.0531, 0.9333, 1.0869, 1.6374, 1.2325, 1.1590], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0399, 0.0320, 0.0327, 0.0350, 0.0414, 0.0384, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 16:02:18,849 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=23343.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:02:29,820 INFO [finetune.py:976] (0/7) Epoch 5, batch 450, loss[loss=0.1957, simple_loss=0.2721, pruned_loss=0.05964, over 4775.00 frames. ], tot_loss[loss=0.2189, simple_loss=0.2786, pruned_loss=0.07954, over 855272.11 frames. ], batch size: 26, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 16:03:14,688 INFO [finetune.py:976] (0/7) Epoch 5, batch 500, loss[loss=0.2113, simple_loss=0.2767, pruned_loss=0.07291, over 4788.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2761, pruned_loss=0.07872, over 876760.05 frames. ], batch size: 29, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 16:03:29,843 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.283e+02 1.686e+02 2.056e+02 2.772e+02 5.396e+02, threshold=4.112e+02, percent-clipped=3.0 +2023-04-26 16:03:47,508 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23450.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:03:51,335 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-26 16:03:54,136 INFO [finetune.py:976] (0/7) Epoch 5, batch 550, loss[loss=0.2087, simple_loss=0.2704, pruned_loss=0.0735, over 4893.00 frames. ], tot_loss[loss=0.2141, simple_loss=0.2733, pruned_loss=0.07746, over 895607.49 frames. 
], batch size: 32, lr: 3.95e-03, grad_scale: 32.0 +2023-04-26 16:03:56,060 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23464.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:04:07,433 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23481.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:04:12,219 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=23488.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:04:19,173 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4409, 0.8682, 1.4119, 1.8386, 1.6306, 1.4033, 1.4746, 1.4842], + device='cuda:0'), covar=tensor([1.0367, 1.4154, 1.3933, 1.6203, 1.1977, 1.4894, 1.5986, 1.2868], + device='cuda:0'), in_proj_covar=tensor([0.0425, 0.0464, 0.0552, 0.0570, 0.0457, 0.0481, 0.0492, 0.0495], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 16:04:19,686 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=23498.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:04:27,578 INFO [finetune.py:976] (0/7) Epoch 5, batch 600, loss[loss=0.206, simple_loss=0.2775, pruned_loss=0.06728, over 4832.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2724, pruned_loss=0.07723, over 907955.88 frames. ], batch size: 40, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:04:36,131 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.963e+02 2.277e+02 2.707e+02 6.010e+02, threshold=4.553e+02, percent-clipped=1.0 +2023-04-26 16:04:39,606 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=23529.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:04:53,877 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=23549.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 16:05:01,027 INFO [finetune.py:976] (0/7) Epoch 5, batch 650, loss[loss=0.2462, simple_loss=0.3131, pruned_loss=0.08969, over 4817.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2766, pruned_loss=0.0787, over 920005.82 frames. ], batch size: 39, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:05:08,393 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23573.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:05:16,708 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23585.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:05:34,423 INFO [finetune.py:976] (0/7) Epoch 5, batch 700, loss[loss=0.2239, simple_loss=0.2767, pruned_loss=0.08551, over 4275.00 frames. ], tot_loss[loss=0.2212, simple_loss=0.2804, pruned_loss=0.08097, over 927580.02 frames. ], batch size: 18, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:05:42,878 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.112e+02 1.928e+02 2.468e+02 2.939e+02 6.493e+02, threshold=4.936e+02, percent-clipped=4.0 +2023-04-26 16:06:19,708 INFO [finetune.py:976] (0/7) Epoch 5, batch 750, loss[loss=0.2015, simple_loss=0.2801, pruned_loss=0.06144, over 4887.00 frames. ], tot_loss[loss=0.223, simple_loss=0.2824, pruned_loss=0.08186, over 934946.48 frames. ], batch size: 35, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:07:02,411 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-04-26 16:07:26,345 INFO [finetune.py:976] (0/7) Epoch 5, batch 800, loss[loss=0.2582, simple_loss=0.3026, pruned_loss=0.1069, over 4897.00 frames. ], tot_loss[loss=0.2215, simple_loss=0.2812, pruned_loss=0.08085, over 938995.84 frames. ], batch size: 37, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:07:34,819 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.218e+02 1.744e+02 2.077e+02 2.568e+02 5.488e+02, threshold=4.154e+02, percent-clipped=2.0 +2023-04-26 16:07:38,670 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5039, 1.3161, 1.7230, 1.6774, 1.4114, 1.1504, 1.4450, 1.1046], + device='cuda:0'), covar=tensor([0.0677, 0.0773, 0.0497, 0.0915, 0.0851, 0.1371, 0.0736, 0.0871], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0078, 0.0077, 0.0071, 0.0082, 0.0098, 0.0085, 0.0080], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 16:08:00,117 INFO [finetune.py:976] (0/7) Epoch 5, batch 850, loss[loss=0.232, simple_loss=0.2914, pruned_loss=0.08633, over 4892.00 frames. ], tot_loss[loss=0.219, simple_loss=0.2786, pruned_loss=0.07973, over 943251.35 frames. ], batch size: 32, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:08:02,000 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23764.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:08:39,279 INFO [finetune.py:976] (0/7) Epoch 5, batch 900, loss[loss=0.2169, simple_loss=0.2755, pruned_loss=0.07913, over 4915.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2756, pruned_loss=0.0783, over 946681.30 frames. ], batch size: 37, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:08:40,383 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=23812.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:08:54,018 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.680e+02 2.075e+02 2.492e+02 8.869e+02, threshold=4.150e+02, percent-clipped=5.0 +2023-04-26 16:09:12,022 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5584, 1.3028, 1.7059, 1.7670, 1.4227, 1.0399, 1.4454, 0.9719], + device='cuda:0'), covar=tensor([0.0789, 0.0956, 0.0636, 0.0900, 0.0985, 0.2119, 0.1008, 0.1342], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0078, 0.0077, 0.0071, 0.0082, 0.0098, 0.0085, 0.0080], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0004, 0.0003], + device='cuda:0') +2023-04-26 16:09:23,343 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=23844.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 16:09:46,929 INFO [finetune.py:976] (0/7) Epoch 5, batch 950, loss[loss=0.1969, simple_loss=0.2507, pruned_loss=0.0715, over 4386.00 frames. ], tot_loss[loss=0.2155, simple_loss=0.2743, pruned_loss=0.0783, over 948672.47 frames. ], batch size: 19, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:09:56,923 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23873.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:10:14,848 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=23885.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:10:44,366 INFO [finetune.py:976] (0/7) Epoch 5, batch 1000, loss[loss=0.2111, simple_loss=0.2606, pruned_loss=0.08084, over 4258.00 frames. ], tot_loss[loss=0.217, simple_loss=0.2756, pruned_loss=0.07914, over 947804.34 frames. 
], batch size: 65, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:10:51,519 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7682, 1.6114, 2.0356, 2.0938, 1.9483, 1.6682, 1.8571, 1.8788], + device='cuda:0'), covar=tensor([1.2829, 1.6555, 1.9004, 1.8119, 1.4127, 2.2114, 2.1220, 1.6923], + device='cuda:0'), in_proj_covar=tensor([0.0425, 0.0463, 0.0551, 0.0569, 0.0456, 0.0480, 0.0492, 0.0494], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 16:10:52,027 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=23921.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:10:54,330 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.295e+02 1.932e+02 2.275e+02 2.774e+02 5.327e+02, threshold=4.551e+02, percent-clipped=3.0 +2023-04-26 16:10:59,243 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=23933.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:11:10,253 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-26 16:11:17,666 INFO [finetune.py:976] (0/7) Epoch 5, batch 1050, loss[loss=0.2009, simple_loss=0.271, pruned_loss=0.06546, over 4895.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2776, pruned_loss=0.07975, over 948754.57 frames. ], batch size: 35, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:11:58,195 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6128, 2.4693, 1.6567, 1.5759, 1.2296, 1.2733, 1.7360, 1.1771], + device='cuda:0'), covar=tensor([0.2008, 0.1626, 0.1910, 0.2406, 0.3017, 0.2397, 0.1400, 0.2548], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0221, 0.0180, 0.0209, 0.0218, 0.0189, 0.0173, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 16:12:12,078 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-24000.pt +2023-04-26 16:12:24,064 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.62 vs. limit=5.0 +2023-04-26 16:12:31,172 INFO [finetune.py:976] (0/7) Epoch 5, batch 1100, loss[loss=0.2146, simple_loss=0.281, pruned_loss=0.07415, over 4902.00 frames. ], tot_loss[loss=0.2195, simple_loss=0.2789, pruned_loss=0.0801, over 949850.16 frames. ], batch size: 37, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:12:45,515 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.787e+02 2.210e+02 2.620e+02 5.269e+02, threshold=4.419e+02, percent-clipped=3.0 +2023-04-26 16:12:53,363 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24037.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:13:06,377 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9124, 1.3955, 1.4892, 1.5265, 2.1092, 1.7449, 1.4088, 1.3957], + device='cuda:0'), covar=tensor([0.1857, 0.1819, 0.2261, 0.1613, 0.0923, 0.1680, 0.2473, 0.2351], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0339, 0.0355, 0.0312, 0.0349, 0.0350, 0.0315, 0.0358], + device='cuda:0'), out_proj_covar=tensor([6.8393e-05, 7.2726e-05, 7.6898e-05, 6.5263e-05, 7.4080e-05, 7.6138e-05, + 6.8302e-05, 7.7247e-05], device='cuda:0') +2023-04-26 16:13:08,675 INFO [finetune.py:976] (0/7) Epoch 5, batch 1150, loss[loss=0.2104, simple_loss=0.2729, pruned_loss=0.07393, over 4905.00 frames. 
], tot_loss[loss=0.2207, simple_loss=0.2801, pruned_loss=0.08068, over 951664.62 frames. ], batch size: 36, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:13:10,074 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.92 vs. limit=5.0 +2023-04-26 16:13:33,503 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24098.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:13:42,165 INFO [finetune.py:976] (0/7) Epoch 5, batch 1200, loss[loss=0.2536, simple_loss=0.3072, pruned_loss=0.1, over 4849.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2773, pruned_loss=0.07916, over 950395.68 frames. ], batch size: 44, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:13:52,168 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.100e+02 1.779e+02 2.090e+02 2.399e+02 4.332e+02, threshold=4.179e+02, percent-clipped=0.0 +2023-04-26 16:14:04,700 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24144.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 16:14:08,472 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-26 16:14:15,789 INFO [finetune.py:976] (0/7) Epoch 5, batch 1250, loss[loss=0.2107, simple_loss=0.259, pruned_loss=0.08121, over 4783.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.274, pruned_loss=0.07755, over 951749.17 frames. ], batch size: 29, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:14:37,071 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=24192.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:15:00,606 INFO [finetune.py:976] (0/7) Epoch 5, batch 1300, loss[loss=0.1951, simple_loss=0.2621, pruned_loss=0.06408, over 4926.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.27, pruned_loss=0.07554, over 953977.47 frames. ], batch size: 33, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:15:10,677 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.317e+02 1.846e+02 2.149e+02 2.713e+02 6.103e+02, threshold=4.299e+02, percent-clipped=1.0 +2023-04-26 16:15:49,983 INFO [finetune.py:976] (0/7) Epoch 5, batch 1350, loss[loss=0.2006, simple_loss=0.2714, pruned_loss=0.06488, over 4874.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2702, pruned_loss=0.07607, over 954908.44 frames. ], batch size: 31, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:16:26,097 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9904, 2.6074, 1.0816, 1.2602, 2.0064, 1.2075, 3.4553, 1.7259], + device='cuda:0'), covar=tensor([0.0705, 0.0683, 0.0797, 0.1398, 0.0513, 0.1125, 0.0323, 0.0680], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0071, 0.0053, 0.0050, 0.0054, 0.0055, 0.0083, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 16:16:29,536 INFO [finetune.py:976] (0/7) Epoch 5, batch 1400, loss[loss=0.22, simple_loss=0.2705, pruned_loss=0.08475, over 4718.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2747, pruned_loss=0.07713, over 956748.90 frames. ], batch size: 23, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:16:38,503 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.222e+02 1.836e+02 2.129e+02 2.470e+02 6.262e+02, threshold=4.259e+02, percent-clipped=1.0 +2023-04-26 16:16:38,796 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. 
limit=2.0 +2023-04-26 16:17:19,152 INFO [finetune.py:976] (0/7) Epoch 5, batch 1450, loss[loss=0.22, simple_loss=0.2855, pruned_loss=0.07725, over 4824.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2769, pruned_loss=0.07759, over 954393.85 frames. ], batch size: 33, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:17:28,184 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3124, 1.5889, 1.3796, 1.5998, 1.3066, 1.2810, 1.4643, 1.2092], + device='cuda:0'), covar=tensor([0.1900, 0.1354, 0.1067, 0.1130, 0.3491, 0.1307, 0.1727, 0.2261], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0333, 0.0242, 0.0306, 0.0326, 0.0287, 0.0275, 0.0299], + device='cuda:0'), out_proj_covar=tensor([1.2776e-04, 1.3531e-04, 9.8408e-05, 1.2304e-04, 1.3432e-04, 1.1596e-04, + 1.1334e-04, 1.2069e-04], device='cuda:0') +2023-04-26 16:18:00,685 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24393.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:18:08,903 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6960, 1.7468, 1.6874, 1.3208, 1.8283, 1.3758, 2.3152, 1.3130], + device='cuda:0'), covar=tensor([0.3604, 0.1507, 0.4004, 0.2540, 0.1574, 0.2376, 0.1106, 0.4244], + device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0356, 0.0437, 0.0369, 0.0397, 0.0384, 0.0394, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 16:18:11,612 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-26 16:18:16,236 INFO [finetune.py:976] (0/7) Epoch 5, batch 1500, loss[loss=0.19, simple_loss=0.2458, pruned_loss=0.06706, over 4786.00 frames. ], tot_loss[loss=0.2188, simple_loss=0.2795, pruned_loss=0.07911, over 953026.35 frames. ], batch size: 51, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:18:22,532 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.75 vs. limit=5.0 +2023-04-26 16:18:25,729 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.847e+02 2.193e+02 2.526e+02 4.286e+02, threshold=4.386e+02, percent-clipped=1.0 +2023-04-26 16:18:49,538 INFO [finetune.py:976] (0/7) Epoch 5, batch 1550, loss[loss=0.2777, simple_loss=0.33, pruned_loss=0.1127, over 4824.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2788, pruned_loss=0.07839, over 954461.65 frames. ], batch size: 33, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:18:59,795 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1855, 2.0978, 1.7390, 1.7515, 2.1691, 1.6657, 2.6092, 1.4953], + device='cuda:0'), covar=tensor([0.4162, 0.1706, 0.5249, 0.3563, 0.2045, 0.2796, 0.1835, 0.4540], + device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0357, 0.0439, 0.0371, 0.0399, 0.0386, 0.0395, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 16:19:22,930 INFO [finetune.py:976] (0/7) Epoch 5, batch 1600, loss[loss=0.2483, simple_loss=0.3044, pruned_loss=0.09609, over 4713.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2761, pruned_loss=0.07734, over 954903.12 frames. 
], batch size: 59, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:19:32,017 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.836e+02 2.156e+02 2.625e+02 3.904e+02, threshold=4.311e+02, percent-clipped=0.0 +2023-04-26 16:19:48,163 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24547.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:19:56,586 INFO [finetune.py:976] (0/7) Epoch 5, batch 1650, loss[loss=0.1751, simple_loss=0.2453, pruned_loss=0.0525, over 4830.00 frames. ], tot_loss[loss=0.2132, simple_loss=0.2732, pruned_loss=0.07662, over 955124.73 frames. ], batch size: 40, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:19:59,164 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0582, 1.8165, 2.1146, 2.3670, 1.8504, 1.5029, 2.0493, 1.0974], + device='cuda:0'), covar=tensor([0.0775, 0.0852, 0.0777, 0.0969, 0.0961, 0.1441, 0.0812, 0.1148], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0077, 0.0076, 0.0070, 0.0080, 0.0096, 0.0083, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 16:20:01,610 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24569.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:20:28,455 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24608.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:20:30,138 INFO [finetune.py:976] (0/7) Epoch 5, batch 1700, loss[loss=0.2113, simple_loss=0.2772, pruned_loss=0.07274, over 4755.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2706, pruned_loss=0.07584, over 953179.76 frames. ], batch size: 54, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:20:38,586 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.758e+02 2.083e+02 2.406e+02 5.348e+02, threshold=4.165e+02, percent-clipped=1.0 +2023-04-26 16:20:42,277 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24630.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:21:03,215 INFO [finetune.py:976] (0/7) Epoch 5, batch 1750, loss[loss=0.2722, simple_loss=0.3227, pruned_loss=0.1109, over 4809.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2736, pruned_loss=0.07714, over 954246.09 frames. ], batch size: 51, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:21:10,121 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9369, 1.3702, 1.8021, 2.1088, 1.6649, 1.3084, 1.0105, 1.5289], + device='cuda:0'), covar=tensor([0.3836, 0.4636, 0.1917, 0.3401, 0.3971, 0.3481, 0.6110, 0.3519], + device='cuda:0'), in_proj_covar=tensor([0.0275, 0.0261, 0.0220, 0.0332, 0.0221, 0.0230, 0.0248, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 16:21:35,452 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=24693.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:21:48,677 INFO [finetune.py:976] (0/7) Epoch 5, batch 1800, loss[loss=0.176, simple_loss=0.2405, pruned_loss=0.05571, over 4774.00 frames. ], tot_loss[loss=0.2162, simple_loss=0.2763, pruned_loss=0.07803, over 953452.41 frames. 
], batch size: 29, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:21:57,278 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.363e+02 2.030e+02 2.431e+02 2.911e+02 5.485e+02, threshold=4.863e+02, percent-clipped=5.0 +2023-04-26 16:22:08,099 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=24741.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:22:08,142 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9486, 1.4712, 1.3071, 1.6957, 1.5843, 1.8529, 1.3661, 3.2089], + device='cuda:0'), covar=tensor([0.0774, 0.0781, 0.0823, 0.1183, 0.0624, 0.0591, 0.0758, 0.0175], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0041, 0.0046, 0.0041, 0.0041, 0.0040, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 16:22:09,948 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1814, 1.5930, 1.4153, 1.7897, 1.6942, 1.9146, 1.4329, 3.5539], + device='cuda:0'), covar=tensor([0.0697, 0.0768, 0.0831, 0.1236, 0.0655, 0.0534, 0.0750, 0.0145], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0041, 0.0046, 0.0041, 0.0041, 0.0040, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 16:22:12,345 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.51 vs. limit=2.0 +2023-04-26 16:22:27,731 INFO [finetune.py:976] (0/7) Epoch 5, batch 1850, loss[loss=0.2606, simple_loss=0.3144, pruned_loss=0.1034, over 4812.00 frames. ], tot_loss[loss=0.2191, simple_loss=0.2795, pruned_loss=0.0794, over 952658.29 frames. ], batch size: 39, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:22:30,298 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24765.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:22:50,704 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-26 16:23:06,423 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.86 vs. limit=5.0 +2023-04-26 16:23:09,350 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0356, 2.5338, 1.0783, 1.3052, 2.0609, 1.2122, 3.2659, 1.6399], + device='cuda:0'), covar=tensor([0.0642, 0.0665, 0.0699, 0.1189, 0.0445, 0.0978, 0.0193, 0.0604], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0071, 0.0052, 0.0050, 0.0054, 0.0055, 0.0083, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 16:23:20,127 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0 +2023-04-26 16:23:29,633 INFO [finetune.py:976] (0/7) Epoch 5, batch 1900, loss[loss=0.215, simple_loss=0.2825, pruned_loss=0.07373, over 4848.00 frames. ], tot_loss[loss=0.2196, simple_loss=0.2803, pruned_loss=0.07938, over 953454.09 frames. 
], batch size: 44, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:23:43,091 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7467, 2.3639, 1.9158, 2.1961, 1.7023, 1.8019, 2.1275, 1.5531], + device='cuda:0'), covar=tensor([0.2730, 0.1818, 0.1357, 0.1870, 0.3590, 0.1926, 0.2033, 0.3013], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0332, 0.0242, 0.0305, 0.0325, 0.0286, 0.0274, 0.0299], + device='cuda:0'), out_proj_covar=tensor([1.2745e-04, 1.3497e-04, 9.8480e-05, 1.2274e-04, 1.3374e-04, 1.1545e-04, + 1.1277e-04, 1.2070e-04], device='cuda:0') +2023-04-26 16:23:44,192 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.223e+02 1.736e+02 2.141e+02 2.516e+02 4.890e+02, threshold=4.282e+02, percent-clipped=1.0 +2023-04-26 16:23:50,620 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24826.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:24:16,057 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2276, 1.5745, 1.4024, 1.7786, 1.6540, 2.1804, 1.4211, 3.6377], + device='cuda:0'), covar=tensor([0.0699, 0.0757, 0.0804, 0.1258, 0.0656, 0.0475, 0.0746, 0.0150], + device='cuda:0'), in_proj_covar=tensor([0.0040, 0.0040, 0.0042, 0.0046, 0.0041, 0.0041, 0.0040, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 16:24:16,081 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24848.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:24:31,093 INFO [finetune.py:976] (0/7) Epoch 5, batch 1950, loss[loss=0.195, simple_loss=0.2609, pruned_loss=0.06454, over 4924.00 frames. ], tot_loss[loss=0.2167, simple_loss=0.2782, pruned_loss=0.07759, over 954845.61 frames. ], batch size: 33, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:24:58,637 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24903.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:25:03,314 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=24909.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:25:04,919 INFO [finetune.py:976] (0/7) Epoch 5, batch 2000, loss[loss=0.1825, simple_loss=0.2497, pruned_loss=0.0577, over 4820.00 frames. ], tot_loss[loss=0.2146, simple_loss=0.2751, pruned_loss=0.07711, over 955031.87 frames. ], batch size: 33, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:25:13,836 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.797e+02 2.078e+02 2.506e+02 3.857e+02, threshold=4.156e+02, percent-clipped=0.0 +2023-04-26 16:25:13,928 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=24925.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:25:37,718 INFO [finetune.py:976] (0/7) Epoch 5, batch 2050, loss[loss=0.1983, simple_loss=0.2576, pruned_loss=0.0695, over 4798.00 frames. ], tot_loss[loss=0.2119, simple_loss=0.2719, pruned_loss=0.076, over 955443.58 frames. ], batch size: 29, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:25:59,891 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=24995.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:26:10,616 INFO [finetune.py:976] (0/7) Epoch 5, batch 2100, loss[loss=0.2467, simple_loss=0.3141, pruned_loss=0.08972, over 4753.00 frames. ], tot_loss[loss=0.2113, simple_loss=0.2712, pruned_loss=0.07572, over 953442.11 frames. 
], batch size: 59, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:26:21,075 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.851e+02 2.091e+02 2.608e+02 4.715e+02, threshold=4.183e+02, percent-clipped=2.0 +2023-04-26 16:26:25,547 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-26 16:26:39,205 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0 +2023-04-26 16:26:40,914 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25056.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 16:26:43,815 INFO [finetune.py:976] (0/7) Epoch 5, batch 2150, loss[loss=0.3015, simple_loss=0.3552, pruned_loss=0.1239, over 4810.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2735, pruned_loss=0.07662, over 953360.26 frames. ], batch size: 41, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:27:00,598 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5577, 1.2895, 1.6284, 1.9209, 1.7170, 1.5797, 1.6079, 1.6574], + device='cuda:0'), covar=tensor([1.1056, 1.4409, 1.5184, 1.6512, 1.2552, 1.7148, 1.8133, 1.4160], + device='cuda:0'), in_proj_covar=tensor([0.0423, 0.0459, 0.0548, 0.0565, 0.0453, 0.0476, 0.0489, 0.0491], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 16:27:11,969 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3093, 3.9798, 0.6679, 1.8662, 1.9380, 2.5178, 2.2424, 0.9996], + device='cuda:0'), covar=tensor([0.1884, 0.1493, 0.2825, 0.1943, 0.1362, 0.1591, 0.1790, 0.2431], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0265, 0.0147, 0.0129, 0.0139, 0.0161, 0.0126, 0.0129], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 16:27:17,133 INFO [finetune.py:976] (0/7) Epoch 5, batch 2200, loss[loss=0.2546, simple_loss=0.3134, pruned_loss=0.09793, over 4899.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2767, pruned_loss=0.07754, over 954356.40 frames. ], batch size: 35, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:27:19,713 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0901, 1.2693, 1.4023, 1.5723, 1.5128, 1.6996, 1.4710, 1.4857], + device='cuda:0'), covar=tensor([0.6555, 1.0565, 0.9328, 0.8736, 0.9457, 1.4225, 1.0463, 1.0014], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0397, 0.0316, 0.0326, 0.0347, 0.0411, 0.0379, 0.0336], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 16:27:24,258 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25121.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:27:27,078 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.188e+02 1.776e+02 2.171e+02 2.634e+02 4.750e+02, threshold=4.343e+02, percent-clipped=2.0 +2023-04-26 16:27:27,878 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-26 16:27:37,069 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-26 16:27:50,267 INFO [finetune.py:976] (0/7) Epoch 5, batch 2250, loss[loss=0.2412, simple_loss=0.3022, pruned_loss=0.09012, over 4815.00 frames. ], tot_loss[loss=0.2186, simple_loss=0.2795, pruned_loss=0.07885, over 955896.73 frames. 
], batch size: 33, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:27:53,470 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7084, 1.5472, 1.9477, 2.0129, 1.9718, 1.6453, 1.7611, 1.7824], + device='cuda:0'), covar=tensor([1.1922, 1.5902, 1.8516, 1.7830, 1.2883, 2.1515, 2.0586, 1.7051], + device='cuda:0'), in_proj_covar=tensor([0.0422, 0.0457, 0.0546, 0.0563, 0.0452, 0.0475, 0.0488, 0.0489], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 16:28:26,342 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25203.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:28:26,891 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25204.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:28:32,137 INFO [finetune.py:976] (0/7) Epoch 5, batch 2300, loss[loss=0.2491, simple_loss=0.3077, pruned_loss=0.09524, over 4797.00 frames. ], tot_loss[loss=0.2185, simple_loss=0.2797, pruned_loss=0.07869, over 956976.64 frames. ], batch size: 51, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:28:52,194 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.823e+02 2.210e+02 2.598e+02 6.851e+02, threshold=4.421e+02, percent-clipped=2.0 +2023-04-26 16:28:52,311 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25225.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:29:14,383 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6674, 2.0356, 1.6684, 1.8620, 1.6185, 1.6343, 1.7071, 1.4029], + device='cuda:0'), covar=tensor([0.2144, 0.1781, 0.1145, 0.1506, 0.3551, 0.1648, 0.2045, 0.2682], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0330, 0.0241, 0.0303, 0.0323, 0.0285, 0.0273, 0.0298], + device='cuda:0'), out_proj_covar=tensor([1.2720e-04, 1.3408e-04, 9.8084e-05, 1.2206e-04, 1.3285e-04, 1.1520e-04, + 1.1253e-04, 1.2024e-04], device='cuda:0') +2023-04-26 16:29:26,416 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=25251.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:29:38,267 INFO [finetune.py:976] (0/7) Epoch 5, batch 2350, loss[loss=0.2131, simple_loss=0.271, pruned_loss=0.07756, over 4872.00 frames. ], tot_loss[loss=0.2168, simple_loss=0.2777, pruned_loss=0.07796, over 957229.89 frames. ], batch size: 31, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:29:57,349 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=25273.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:30:39,274 INFO [finetune.py:976] (0/7) Epoch 5, batch 2400, loss[loss=0.1974, simple_loss=0.2576, pruned_loss=0.06854, over 4937.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2743, pruned_loss=0.0771, over 957206.77 frames. ], batch size: 33, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:30:48,373 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.145e+02 1.795e+02 2.098e+02 2.522e+02 5.509e+02, threshold=4.195e+02, percent-clipped=3.0 +2023-04-26 16:31:12,373 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25351.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 16:31:18,441 INFO [finetune.py:976] (0/7) Epoch 5, batch 2450, loss[loss=0.1581, simple_loss=0.222, pruned_loss=0.04705, over 4706.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.27, pruned_loss=0.07521, over 956785.81 frames. 
], batch size: 23, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:31:37,571 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9608, 2.4369, 1.0153, 1.2430, 1.7583, 1.1845, 2.9773, 1.5174], + device='cuda:0'), covar=tensor([0.0742, 0.0630, 0.0847, 0.1456, 0.0601, 0.1180, 0.0289, 0.0800], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0070, 0.0052, 0.0049, 0.0054, 0.0055, 0.0082, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 16:32:08,174 INFO [finetune.py:976] (0/7) Epoch 5, batch 2500, loss[loss=0.248, simple_loss=0.3189, pruned_loss=0.08851, over 4896.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.2715, pruned_loss=0.07611, over 956681.39 frames. ], batch size: 43, lr: 3.94e-03, grad_scale: 64.0 +2023-04-26 16:32:15,241 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25421.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:32:17,546 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.862e+02 2.343e+02 2.826e+02 5.817e+02, threshold=4.685e+02, percent-clipped=1.0 +2023-04-26 16:32:18,288 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5114, 1.5294, 0.6314, 1.2221, 1.5969, 1.4055, 1.2995, 1.3241], + device='cuda:0'), covar=tensor([0.0543, 0.0427, 0.0441, 0.0611, 0.0308, 0.0567, 0.0561, 0.0641], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0024, 0.0030, 0.0022, 0.0030, 0.0030, 0.0031], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 16:32:19,530 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25428.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 16:32:40,830 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4617, 1.2466, 1.7430, 1.6668, 1.3160, 1.1706, 1.4058, 0.9547], + device='cuda:0'), covar=tensor([0.0667, 0.0965, 0.0555, 0.0882, 0.1085, 0.1511, 0.0744, 0.0979], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0077, 0.0075, 0.0070, 0.0081, 0.0096, 0.0083, 0.0079], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 16:32:41,938 INFO [finetune.py:976] (0/7) Epoch 5, batch 2550, loss[loss=0.1679, simple_loss=0.2359, pruned_loss=0.0499, over 4796.00 frames. ], tot_loss[loss=0.2156, simple_loss=0.2758, pruned_loss=0.07776, over 955679.51 frames. ], batch size: 25, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:32:47,370 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=25469.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:33:01,474 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25489.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 16:33:11,416 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25504.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:33:15,645 INFO [finetune.py:976] (0/7) Epoch 5, batch 2600, loss[loss=0.1745, simple_loss=0.2456, pruned_loss=0.05174, over 4782.00 frames. ], tot_loss[loss=0.2161, simple_loss=0.2765, pruned_loss=0.07788, over 953781.49 frames. 
], batch size: 29, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:33:25,304 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.303e+02 1.830e+02 2.137e+02 2.517e+02 4.934e+02, threshold=4.274e+02, percent-clipped=1.0 +2023-04-26 16:33:43,922 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=25552.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:33:49,356 INFO [finetune.py:976] (0/7) Epoch 5, batch 2650, loss[loss=0.1887, simple_loss=0.2526, pruned_loss=0.06237, over 4820.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2761, pruned_loss=0.07711, over 952145.41 frames. ], batch size: 33, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:34:09,725 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1141, 1.4840, 1.3452, 1.7203, 1.5686, 1.9636, 1.3835, 3.4358], + device='cuda:0'), covar=tensor([0.0681, 0.0791, 0.0789, 0.1249, 0.0659, 0.0574, 0.0758, 0.0157], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0040, 0.0041, 0.0046, 0.0041, 0.0041, 0.0040, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 16:34:12,532 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5366, 1.9139, 1.5141, 1.1833, 1.2055, 1.2190, 1.5120, 1.1306], + device='cuda:0'), covar=tensor([0.2029, 0.1712, 0.2007, 0.2436, 0.3163, 0.2449, 0.1478, 0.2528], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0222, 0.0179, 0.0209, 0.0216, 0.0189, 0.0172, 0.0197], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 16:34:28,770 INFO [finetune.py:976] (0/7) Epoch 5, batch 2700, loss[loss=0.1705, simple_loss=0.2421, pruned_loss=0.04943, over 4820.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2745, pruned_loss=0.07583, over 952112.65 frames. ], batch size: 39, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:34:48,614 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.304e+02 1.757e+02 2.118e+02 2.619e+02 4.731e+02, threshold=4.237e+02, percent-clipped=2.0 +2023-04-26 16:35:16,716 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.2894, 1.3447, 1.4259, 0.9996, 1.3589, 1.1275, 1.6581, 1.3252], + device='cuda:0'), covar=tensor([0.4333, 0.1897, 0.5629, 0.3046, 0.1821, 0.2637, 0.1900, 0.5199], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0352, 0.0431, 0.0365, 0.0394, 0.0382, 0.0394, 0.0416], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 16:35:20,241 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=25651.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 16:35:28,066 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3948, 3.0132, 0.8087, 1.6634, 1.7623, 2.0506, 1.7526, 1.0263], + device='cuda:0'), covar=tensor([0.1441, 0.1011, 0.2120, 0.1405, 0.1079, 0.1139, 0.1611, 0.1913], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0265, 0.0148, 0.0130, 0.0139, 0.0162, 0.0127, 0.0129], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 16:35:37,124 INFO [finetune.py:976] (0/7) Epoch 5, batch 2750, loss[loss=0.1721, simple_loss=0.2228, pruned_loss=0.06064, over 4261.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2713, pruned_loss=0.07471, over 951978.05 frames. 
], batch size: 18, lr: 3.94e-03, grad_scale: 32.0 +2023-04-26 16:35:52,013 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-26 16:36:13,076 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.76 vs. limit=5.0 +2023-04-26 16:36:24,079 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=25699.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 16:36:32,332 INFO [finetune.py:976] (0/7) Epoch 5, batch 2800, loss[loss=0.1894, simple_loss=0.2522, pruned_loss=0.06332, over 4756.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2676, pruned_loss=0.07356, over 953190.72 frames. ], batch size: 27, lr: 3.93e-03, grad_scale: 32.0 +2023-04-26 16:36:47,384 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.821e+02 2.084e+02 2.532e+02 5.696e+02, threshold=4.167e+02, percent-clipped=4.0 +2023-04-26 16:37:23,593 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-26 16:37:32,909 INFO [finetune.py:976] (0/7) Epoch 5, batch 2850, loss[loss=0.2502, simple_loss=0.3163, pruned_loss=0.09202, over 4927.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.266, pruned_loss=0.0729, over 955750.70 frames. ], batch size: 42, lr: 3.93e-03, grad_scale: 32.0 +2023-04-26 16:37:46,996 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=25784.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 16:37:48,767 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9904, 2.4790, 1.0590, 1.3493, 1.9258, 1.2327, 3.2471, 1.7078], + device='cuda:0'), covar=tensor([0.0716, 0.0636, 0.0765, 0.1288, 0.0531, 0.1000, 0.0248, 0.0629], + device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0071, 0.0053, 0.0049, 0.0055, 0.0055, 0.0083, 0.0053], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 16:38:03,317 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.0112, 3.7740, 2.7787, 4.5732, 3.9152, 3.9787, 1.8103, 3.9507], + device='cuda:0'), covar=tensor([0.1361, 0.1204, 0.3029, 0.1369, 0.4512, 0.1747, 0.5372, 0.1974], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0221, 0.0254, 0.0309, 0.0302, 0.0255, 0.0272, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 16:38:06,695 INFO [finetune.py:976] (0/7) Epoch 5, batch 2900, loss[loss=0.2093, simple_loss=0.2714, pruned_loss=0.0736, over 4862.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2688, pruned_loss=0.074, over 956213.85 frames. 
], batch size: 34, lr: 3.93e-03, grad_scale: 32.0 +2023-04-26 16:38:12,266 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2505, 1.5572, 1.4828, 1.8195, 1.7271, 1.8494, 1.5427, 3.0683], + device='cuda:0'), covar=tensor([0.0672, 0.0711, 0.0715, 0.1081, 0.0576, 0.0673, 0.0698, 0.0199], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0040, 0.0041, 0.0046, 0.0041, 0.0041, 0.0040, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 16:38:15,790 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.320e+02 1.753e+02 2.187e+02 2.584e+02 6.163e+02, threshold=4.374e+02, percent-clipped=2.0 +2023-04-26 16:38:37,771 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=25858.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 16:38:39,948 INFO [finetune.py:976] (0/7) Epoch 5, batch 2950, loss[loss=0.2287, simple_loss=0.2845, pruned_loss=0.08649, over 4925.00 frames. ], tot_loss[loss=0.211, simple_loss=0.2722, pruned_loss=0.07494, over 955665.67 frames. ], batch size: 33, lr: 3.93e-03, grad_scale: 32.0 +2023-04-26 16:39:12,517 INFO [finetune.py:976] (0/7) Epoch 5, batch 3000, loss[loss=0.1739, simple_loss=0.2299, pruned_loss=0.05897, over 4730.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2748, pruned_loss=0.07597, over 957398.56 frames. ], batch size: 23, lr: 3.93e-03, grad_scale: 32.0 +2023-04-26 16:39:12,518 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 16:39:29,056 INFO [finetune.py:1010] (0/7) Epoch 5, validation: loss=0.1595, simple_loss=0.233, pruned_loss=0.04303, over 2265189.00 frames. +2023-04-26 16:39:29,057 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-26 16:39:41,219 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=25919.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 16:39:51,739 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.795e+02 2.151e+02 2.692e+02 4.010e+02, threshold=4.303e+02, percent-clipped=0.0 +2023-04-26 16:40:35,901 INFO [finetune.py:976] (0/7) Epoch 5, batch 3050, loss[loss=0.2176, simple_loss=0.2843, pruned_loss=0.07541, over 4706.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2769, pruned_loss=0.07658, over 956894.89 frames. ], batch size: 59, lr: 3.93e-03, grad_scale: 32.0 +2023-04-26 16:41:24,067 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-26000.pt +2023-04-26 16:41:32,849 INFO [finetune.py:976] (0/7) Epoch 5, batch 3100, loss[loss=0.2225, simple_loss=0.2817, pruned_loss=0.08164, over 4776.00 frames. ], tot_loss[loss=0.2131, simple_loss=0.2749, pruned_loss=0.07562, over 956558.03 frames. 
], batch size: 28, lr: 3.93e-03, grad_scale: 32.0
+2023-04-26 16:41:43,947 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.102e+02 1.748e+02 1.976e+02 2.307e+02 6.292e+02, threshold=3.952e+02, percent-clipped=1.0
+2023-04-26 16:41:44,087 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5414, 1.3708, 1.8528, 1.8928, 1.4638, 1.1665, 1.5946, 1.0523],
+ device='cuda:0'), covar=tensor([0.0777, 0.1013, 0.0587, 0.0814, 0.1018, 0.1439, 0.0818, 0.0999],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0077, 0.0076, 0.0069, 0.0080, 0.0096, 0.0083, 0.0079],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 16:41:57,437 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7663, 3.7939, 2.7969, 4.4812, 3.9495, 3.8755, 1.7505, 3.8812],
+ device='cuda:0'), covar=tensor([0.1765, 0.1249, 0.3080, 0.1711, 0.4670, 0.1963, 0.5914, 0.2255],
+ device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0222, 0.0256, 0.0312, 0.0305, 0.0257, 0.0275, 0.0277],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 16:42:06,253 INFO [finetune.py:976] (0/7) Epoch 5, batch 3150, loss[loss=0.2059, simple_loss=0.2659, pruned_loss=0.07297, over 4928.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2727, pruned_loss=0.07568, over 957035.66 frames. ], batch size: 38, lr: 3.93e-03, grad_scale: 32.0
+2023-04-26 16:42:27,117 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26084.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 16:42:46,433 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26102.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:42:54,362 INFO [finetune.py:976] (0/7) Epoch 5, batch 3200, loss[loss=0.2167, simple_loss=0.2771, pruned_loss=0.07811, over 4899.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.269, pruned_loss=0.0742, over 957074.16 frames. ], batch size: 32, lr: 3.93e-03, grad_scale: 32.0
+2023-04-26 16:43:09,264 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.183e+02 1.750e+02 2.081e+02 2.590e+02 4.901e+02, threshold=4.161e+02, percent-clipped=2.0
+2023-04-26 16:43:13,918 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=26132.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 16:43:31,951 INFO [finetune.py:976] (0/7) Epoch 5, batch 3250, loss[loss=0.1716, simple_loss=0.235, pruned_loss=0.05409, over 4796.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2695, pruned_loss=0.07423, over 955069.27 frames. ], batch size: 25, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:43:33,265 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26163.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:43:36,829 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26168.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:43:39,731 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5264, 1.0957, 4.1705, 3.9046, 3.6242, 3.9187, 3.8650, 3.6143],
+ device='cuda:0'), covar=tensor([0.7331, 0.6581, 0.1113, 0.1879, 0.1130, 0.1537, 0.1673, 0.1467],
+ device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0307, 0.0417, 0.0423, 0.0359, 0.0410, 0.0322, 0.0378],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 16:44:01,378 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9857, 1.2193, 1.3449, 1.4857, 1.4536, 1.5908, 1.4026, 1.4403],
+ device='cuda:0'), covar=tensor([0.7702, 0.9995, 0.9769, 0.8766, 0.9977, 1.4908, 1.0643, 0.9936],
+ device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0396, 0.0317, 0.0327, 0.0347, 0.0411, 0.0379, 0.0336],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 16:44:05,485 INFO [finetune.py:976] (0/7) Epoch 5, batch 3300, loss[loss=0.2624, simple_loss=0.3315, pruned_loss=0.09668, over 4862.00 frames. ], tot_loss[loss=0.2118, simple_loss=0.273, pruned_loss=0.07533, over 953939.30 frames. ], batch size: 31, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:44:07,390 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26214.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 16:44:16,106 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.343e+02 1.793e+02 2.255e+02 2.677e+02 5.857e+02, threshold=4.510e+02, percent-clipped=2.0
+2023-04-26 16:44:17,428 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26229.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:44:32,025 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26250.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:44:38,662 INFO [finetune.py:976] (0/7) Epoch 5, batch 3350, loss[loss=0.2215, simple_loss=0.2875, pruned_loss=0.07779, over 4878.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2753, pruned_loss=0.07584, over 955474.66 frames. ], batch size: 34, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:44:44,936 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.86 vs. limit=5.0
+2023-04-26 16:45:50,832 INFO [finetune.py:976] (0/7) Epoch 5, batch 3400, loss[loss=0.222, simple_loss=0.2848, pruned_loss=0.07963, over 4879.00 frames. ], tot_loss[loss=0.2158, simple_loss=0.2774, pruned_loss=0.07713, over 953360.86 frames. ], batch size: 43, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:45:50,974 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26311.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:46:13,229 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.738e+02 2.082e+02 2.437e+02 3.720e+02, threshold=4.164e+02, percent-clipped=0.0
+2023-04-26 16:46:56,401 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9049, 1.2721, 3.2723, 3.0402, 2.9776, 3.1843, 3.1948, 2.9204],
+ device='cuda:0'), covar=tensor([0.7209, 0.5313, 0.1466, 0.2084, 0.1465, 0.1918, 0.1832, 0.1788],
+ device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0309, 0.0421, 0.0426, 0.0361, 0.0413, 0.0325, 0.0382],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 16:46:56,944 INFO [finetune.py:976] (0/7) Epoch 5, batch 3450, loss[loss=0.2511, simple_loss=0.3157, pruned_loss=0.09326, over 4895.00 frames. ], tot_loss[loss=0.2147, simple_loss=0.2765, pruned_loss=0.07644, over 952808.26 frames. ], batch size: 36, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:48:03,179 INFO [finetune.py:976] (0/7) Epoch 5, batch 3500, loss[loss=0.1818, simple_loss=0.2479, pruned_loss=0.05787, over 4776.00 frames. ], tot_loss[loss=0.2129, simple_loss=0.2737, pruned_loss=0.07607, over 951712.83 frames. ], batch size: 27, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:48:05,094 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26414.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:48:25,047 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.307e+02 1.713e+02 2.095e+02 2.561e+02 6.592e+02, threshold=4.191e+02, percent-clipped=2.0
+2023-04-26 16:48:27,237 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-04-26 16:49:08,060 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0
+2023-04-26 16:49:08,573 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26458.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:49:10,351 INFO [finetune.py:976] (0/7) Epoch 5, batch 3550, loss[loss=0.1633, simple_loss=0.2342, pruned_loss=0.04626, over 4791.00 frames. ], tot_loss[loss=0.2111, simple_loss=0.2708, pruned_loss=0.07569, over 951011.89 frames. ], batch size: 29, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:49:24,606 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26475.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:49:49,836 INFO [finetune.py:976] (0/7) Epoch 5, batch 3600, loss[loss=0.1899, simple_loss=0.2554, pruned_loss=0.06219, over 4935.00 frames. ], tot_loss[loss=0.2089, simple_loss=0.2683, pruned_loss=0.07474, over 950671.17 frames. ], batch size: 33, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:49:51,768 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26514.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 16:49:57,837 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26524.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:49:59,593 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.718e+02 2.043e+02 2.547e+02 4.174e+02, threshold=4.086e+02, percent-clipped=0.0
+2023-04-26 16:50:05,103 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0
+2023-04-26 16:50:20,511 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-26 16:50:23,342 INFO [finetune.py:976] (0/7) Epoch 5, batch 3650, loss[loss=0.1637, simple_loss=0.2336, pruned_loss=0.04693, over 4821.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.2712, pruned_loss=0.07588, over 952800.43 frames. ], batch size: 30, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:50:24,020 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=26562.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 16:50:53,155 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26606.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:50:57,092 INFO [finetune.py:976] (0/7) Epoch 5, batch 3700, loss[loss=0.179, simple_loss=0.252, pruned_loss=0.05302, over 4902.00 frames. ], tot_loss[loss=0.2152, simple_loss=0.2753, pruned_loss=0.07752, over 953593.33 frames. ], batch size: 36, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:51:06,779 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.827e+02 2.264e+02 2.659e+02 6.887e+02, threshold=4.529e+02, percent-clipped=2.0
+2023-04-26 16:51:23,531 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5681, 1.7420, 1.8133, 2.3663, 2.5856, 2.2136, 2.0048, 1.9117],
+ device='cuda:0'), covar=tensor([0.1868, 0.2288, 0.2572, 0.1835, 0.1482, 0.2395, 0.2981, 0.2271],
+ device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0335, 0.0351, 0.0309, 0.0346, 0.0343, 0.0311, 0.0352],
+ device='cuda:0'), out_proj_covar=tensor([6.7874e-05, 7.1652e-05, 7.6054e-05, 6.4694e-05, 7.3393e-05, 7.4618e-05,
+ 6.7415e-05, 7.5899e-05], device='cuda:0')
+2023-04-26 16:51:29,840 INFO [finetune.py:976] (0/7) Epoch 5, batch 3750, loss[loss=0.2437, simple_loss=0.3029, pruned_loss=0.09224, over 4854.00 frames. ], tot_loss[loss=0.2153, simple_loss=0.2759, pruned_loss=0.07738, over 952523.40 frames. ], batch size: 44, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:52:20,489 INFO [finetune.py:976] (0/7) Epoch 5, batch 3800, loss[loss=0.2087, simple_loss=0.2666, pruned_loss=0.07539, over 4801.00 frames. ], tot_loss[loss=0.217, simple_loss=0.278, pruned_loss=0.07798, over 953615.30 frames. ], batch size: 45, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:52:31,668 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.856e+02 2.180e+02 2.581e+02 5.516e+02, threshold=4.361e+02, percent-clipped=1.0
+2023-04-26 16:52:52,109 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26758.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:52:54,307 INFO [finetune.py:976] (0/7) Epoch 5, batch 3850, loss[loss=0.1844, simple_loss=0.2435, pruned_loss=0.06263, over 4931.00 frames. ], tot_loss[loss=0.215, simple_loss=0.2762, pruned_loss=0.07692, over 953265.04 frames. ], batch size: 33, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:53:00,818 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=26770.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:53:40,970 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=26806.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:53:44,435 INFO [finetune.py:976] (0/7) Epoch 5, batch 3900, loss[loss=0.177, simple_loss=0.2312, pruned_loss=0.06139, over 4720.00 frames. ], tot_loss[loss=0.2123, simple_loss=0.273, pruned_loss=0.07575, over 954407.52 frames. ], batch size: 23, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:54:03,742 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26824.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:54:05,468 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.758e+02 2.108e+02 2.612e+02 6.373e+02, threshold=4.215e+02, percent-clipped=3.0
+2023-04-26 16:54:50,051 INFO [finetune.py:976] (0/7) Epoch 5, batch 3950, loss[loss=0.1703, simple_loss=0.2417, pruned_loss=0.04946, over 4778.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2688, pruned_loss=0.07386, over 954396.92 frames. ], batch size: 28, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:55:08,484 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=26872.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:55:10,798 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0
+2023-04-26 16:55:47,017 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26904.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 16:55:48,221 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=26906.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:55:50,674 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-04-26 16:55:51,739 INFO [finetune.py:976] (0/7) Epoch 5, batch 4000, loss[loss=0.2634, simple_loss=0.3333, pruned_loss=0.09673, over 4807.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.268, pruned_loss=0.07416, over 955904.74 frames. ], batch size: 45, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:56:02,922 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.248e+02 1.675e+02 2.014e+02 2.454e+02 6.687e+02, threshold=4.029e+02, percent-clipped=1.0
+2023-04-26 16:56:17,498 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=26950.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:56:19,903 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=26954.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:56:25,068 INFO [finetune.py:976] (0/7) Epoch 5, batch 4050, loss[loss=0.1893, simple_loss=0.2752, pruned_loss=0.05169, over 4899.00 frames. ], tot_loss[loss=0.2115, simple_loss=0.272, pruned_loss=0.07551, over 956006.63 frames. ], batch size: 37, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:56:28,117 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=26965.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 16:56:31,666 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4479, 1.6860, 1.2380, 0.9943, 1.1249, 1.0950, 1.1995, 1.0405],
+ device='cuda:0'), covar=tensor([0.1968, 0.1582, 0.2055, 0.2225, 0.2957, 0.2415, 0.1464, 0.2383],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0221, 0.0179, 0.0210, 0.0216, 0.0189, 0.0172, 0.0196],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 16:56:49,527 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0
+2023-04-26 16:56:58,980 INFO [finetune.py:976] (0/7) Epoch 5, batch 4100, loss[loss=0.2419, simple_loss=0.2919, pruned_loss=0.09595, over 4854.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2765, pruned_loss=0.07765, over 955590.73 frames. ], batch size: 44, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:56:59,099 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27011.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:57:11,182 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9142, 1.8139, 2.0073, 1.9553, 1.9790, 1.6835, 1.7871, 1.8832],
+ device='cuda:0'), covar=tensor([1.3606, 1.7042, 2.2719, 2.4535, 1.4419, 2.7159, 2.7213, 2.1024],
+ device='cuda:0'), in_proj_covar=tensor([0.0422, 0.0455, 0.0542, 0.0562, 0.0452, 0.0475, 0.0485, 0.0487],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 16:57:20,559 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.859e+02 2.211e+02 2.694e+02 5.784e+02, threshold=4.421e+02, percent-clipped=3.0
+2023-04-26 16:58:04,488 INFO [finetune.py:976] (0/7) Epoch 5, batch 4150, loss[loss=0.2113, simple_loss=0.2758, pruned_loss=0.07337, over 4767.00 frames. ], tot_loss[loss=0.2169, simple_loss=0.2772, pruned_loss=0.07829, over 952247.09 frames. ], batch size: 54, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:58:16,091 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27070.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:58:47,719 INFO [finetune.py:976] (0/7) Epoch 5, batch 4200, loss[loss=0.1841, simple_loss=0.2511, pruned_loss=0.05854, over 4800.00 frames. ], tot_loss[loss=0.2163, simple_loss=0.2773, pruned_loss=0.07765, over 951031.25 frames. ], batch size: 25, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:58:53,010 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=27118.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:58:58,900 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.181e+01 1.721e+02 2.038e+02 2.503e+02 4.432e+02, threshold=4.077e+02, percent-clipped=1.0
+2023-04-26 16:59:13,612 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27148.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:59:20,319 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8223, 1.6617, 1.8958, 2.1431, 2.1907, 1.6970, 1.4084, 1.7286],
+ device='cuda:0'), covar=tensor([0.0827, 0.1062, 0.0628, 0.0613, 0.0515, 0.0965, 0.0833, 0.0705],
+ device='cuda:0'), in_proj_covar=tensor([0.0201, 0.0205, 0.0180, 0.0176, 0.0175, 0.0193, 0.0163, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 16:59:21,417 INFO [finetune.py:976] (0/7) Epoch 5, batch 4250, loss[loss=0.2265, simple_loss=0.2873, pruned_loss=0.08288, over 4786.00 frames. ], tot_loss[loss=0.214, simple_loss=0.2745, pruned_loss=0.07676, over 951014.56 frames. ], batch size: 51, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 16:59:37,459 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8133, 2.2664, 2.8360, 3.2993, 2.6587, 2.2338, 2.0109, 2.6548],
+ device='cuda:0'), covar=tensor([0.3499, 0.3776, 0.1702, 0.3109, 0.3352, 0.2822, 0.4820, 0.2559],
+ device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0261, 0.0222, 0.0331, 0.0221, 0.0231, 0.0245, 0.0196],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 16:59:54,265 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27209.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 16:59:55,346 INFO [finetune.py:976] (0/7) Epoch 5, batch 4300, loss[loss=0.2229, simple_loss=0.2793, pruned_loss=0.08322, over 4902.00 frames. ], tot_loss[loss=0.2106, simple_loss=0.2706, pruned_loss=0.07529, over 951163.60 frames. ], batch size: 35, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:00:17,880 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.798e+02 2.267e+02 2.763e+02 5.468e+02, threshold=4.535e+02, percent-clipped=5.0
+2023-04-26 17:01:04,147 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27260.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 17:01:04,675 INFO [finetune.py:976] (0/7) Epoch 5, batch 4350, loss[loss=0.2289, simple_loss=0.28, pruned_loss=0.08889, over 4743.00 frames. ], tot_loss[loss=0.207, simple_loss=0.267, pruned_loss=0.07349, over 953407.21 frames. ], batch size: 27, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:01:11,485 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.2484, 3.2362, 2.3525, 3.8003, 3.3253, 3.3134, 1.3130, 3.2788],
+ device='cuda:0'), covar=tensor([0.2045, 0.1549, 0.3828, 0.2669, 0.4245, 0.2245, 0.6014, 0.2483],
+ device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0223, 0.0256, 0.0314, 0.0306, 0.0256, 0.0277, 0.0279],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 17:01:46,637 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27306.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:01:49,556 INFO [finetune.py:976] (0/7) Epoch 5, batch 4400, loss[loss=0.2415, simple_loss=0.3089, pruned_loss=0.08704, over 4821.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2689, pruned_loss=0.07431, over 950698.32 frames. ], batch size: 39, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:02:00,137 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.042e+02 1.703e+02 2.157e+02 2.468e+02 4.465e+02, threshold=4.314e+02, percent-clipped=0.0
+2023-04-26 17:02:23,093 INFO [finetune.py:976] (0/7) Epoch 5, batch 4450, loss[loss=0.186, simple_loss=0.233, pruned_loss=0.06952, over 4018.00 frames. ], tot_loss[loss=0.2134, simple_loss=0.2738, pruned_loss=0.07652, over 946881.78 frames. ], batch size: 17, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:02:28,752 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2675, 1.5050, 1.3766, 1.7241, 1.5290, 1.7708, 1.4103, 2.8211],
+ device='cuda:0'), covar=tensor([0.0684, 0.0828, 0.0860, 0.1173, 0.0690, 0.0705, 0.0869, 0.0324],
+ device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0040, 0.0041, 0.0045, 0.0041, 0.0041, 0.0040, 0.0062],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 17:02:56,797 INFO [finetune.py:976] (0/7) Epoch 5, batch 4500, loss[loss=0.1882, simple_loss=0.2574, pruned_loss=0.0595, over 4921.00 frames. ], tot_loss[loss=0.2149, simple_loss=0.2751, pruned_loss=0.07734, over 947982.36 frames. ], batch size: 38, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:03:17,442 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.778e+02 2.173e+02 2.607e+02 4.915e+02, threshold=4.346e+02, percent-clipped=1.0
+2023-04-26 17:04:02,851 INFO [finetune.py:976] (0/7) Epoch 5, batch 4550, loss[loss=0.1944, simple_loss=0.2566, pruned_loss=0.06604, over 4730.00 frames. ], tot_loss[loss=0.2154, simple_loss=0.2758, pruned_loss=0.07754, over 947486.29 frames. ], batch size: 54, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:04:31,477 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=27504.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:04:36,157 INFO [finetune.py:976] (0/7) Epoch 5, batch 4600, loss[loss=0.1851, simple_loss=0.2519, pruned_loss=0.05916, over 4886.00 frames. ], tot_loss[loss=0.2124, simple_loss=0.2734, pruned_loss=0.07567, over 947295.51 frames. ], batch size: 43, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:04:46,279 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.803e+02 2.105e+02 2.477e+02 5.679e+02, threshold=4.210e+02, percent-clipped=3.0
+2023-04-26 17:04:55,952 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6718, 1.8344, 1.0504, 1.3464, 1.9277, 1.5430, 1.4110, 1.5657],
+ device='cuda:0'), covar=tensor([0.0524, 0.0385, 0.0379, 0.0594, 0.0271, 0.0569, 0.0529, 0.0590],
+ device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0024, 0.0031, 0.0021, 0.0030, 0.0030, 0.0031],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0049, 0.0038, 0.0048, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-26 17:05:03,606 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7159, 1.7731, 1.7847, 2.4503, 2.6933, 2.2896, 2.0919, 1.9626],
+ device='cuda:0'), covar=tensor([0.1798, 0.2185, 0.2173, 0.2227, 0.1392, 0.1831, 0.2563, 0.1874],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0333, 0.0349, 0.0307, 0.0342, 0.0339, 0.0307, 0.0350],
+ device='cuda:0'), out_proj_covar=tensor([6.6795e-05, 7.1243e-05, 7.5603e-05, 6.4027e-05, 7.2450e-05, 7.3521e-05,
+ 6.6596e-05, 7.5305e-05], device='cuda:0')
+2023-04-26 17:05:08,944 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27560.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 17:05:09,447 INFO [finetune.py:976] (0/7) Epoch 5, batch 4650, loss[loss=0.2329, simple_loss=0.2862, pruned_loss=0.08981, over 4298.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2706, pruned_loss=0.0746, over 947537.21 frames. ], batch size: 65, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:05:21,584 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-26 17:05:25,060 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8398, 3.6961, 2.6885, 4.4488, 3.8249, 3.7999, 1.6857, 3.7663],
+ device='cuda:0'), covar=tensor([0.1727, 0.1355, 0.3812, 0.1371, 0.3278, 0.1890, 0.6492, 0.2361],
+ device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0223, 0.0257, 0.0314, 0.0307, 0.0259, 0.0278, 0.0279],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 17:05:35,447 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6095, 1.5546, 0.8842, 1.3206, 1.7372, 1.4996, 1.3959, 1.4264],
+ device='cuda:0'), covar=tensor([0.0534, 0.0398, 0.0393, 0.0597, 0.0274, 0.0538, 0.0552, 0.0616],
+ device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0024, 0.0030, 0.0021, 0.0030, 0.0030, 0.0031],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0049, 0.0038, 0.0048, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-26 17:05:40,056 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27606.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:05:41,303 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=27608.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 17:05:43,535 INFO [finetune.py:976] (0/7) Epoch 5, batch 4700, loss[loss=0.223, simple_loss=0.2739, pruned_loss=0.08607, over 4817.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2687, pruned_loss=0.07404, over 949345.24 frames. ], batch size: 41, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:05:54,152 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.680e+02 2.058e+02 2.502e+02 5.893e+02, threshold=4.117e+02, percent-clipped=4.0
+2023-04-26 17:06:22,131 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8513, 2.1918, 1.2464, 1.6218, 2.3061, 1.7899, 1.6823, 1.8255],
+ device='cuda:0'), covar=tensor([0.0550, 0.0364, 0.0344, 0.0599, 0.0251, 0.0561, 0.0553, 0.0588],
+ device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0031],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0050],
+ device='cuda:0')
+2023-04-26 17:06:23,326 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=27654.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:06:34,207 INFO [finetune.py:976] (0/7) Epoch 5, batch 4750, loss[loss=0.2357, simple_loss=0.2928, pruned_loss=0.08931, over 4819.00 frames. ], tot_loss[loss=0.2084, simple_loss=0.2683, pruned_loss=0.07429, over 950316.81 frames. ], batch size: 45, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:07:40,905 INFO [finetune.py:976] (0/7) Epoch 5, batch 4800, loss[loss=0.2043, simple_loss=0.2592, pruned_loss=0.07466, over 4747.00 frames. ], tot_loss[loss=0.21, simple_loss=0.2701, pruned_loss=0.075, over 947732.80 frames. ], batch size: 27, lr: 3.93e-03, grad_scale: 16.0
+2023-04-26 17:07:48,575 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=27714.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 17:08:02,774 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.360e+02 1.880e+02 2.179e+02 2.503e+02 4.628e+02, threshold=4.358e+02, percent-clipped=2.0
+2023-04-26 17:08:33,561 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6615, 1.2620, 4.3963, 4.1392, 3.9761, 4.2015, 4.1111, 3.9073],
+ device='cuda:0'), covar=tensor([0.6898, 0.6470, 0.0983, 0.1559, 0.1017, 0.1788, 0.1422, 0.1451],
+ device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0311, 0.0418, 0.0424, 0.0356, 0.0411, 0.0322, 0.0379],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 17:08:41,284 INFO [finetune.py:976] (0/7) Epoch 5, batch 4850, loss[loss=0.2276, simple_loss=0.2963, pruned_loss=0.07944, over 4806.00 frames. ], tot_loss[loss=0.2127, simple_loss=0.2737, pruned_loss=0.07584, over 950580.69 frames. ], batch size: 45, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:08:49,737 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7765, 2.3772, 1.7419, 1.5332, 1.2725, 1.3714, 1.7161, 1.2243],
+ device='cuda:0'), covar=tensor([0.1927, 0.1583, 0.1842, 0.2231, 0.2994, 0.2385, 0.1413, 0.2515],
+ device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0219, 0.0177, 0.0207, 0.0213, 0.0187, 0.0170, 0.0194],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 17:08:50,943 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=27775.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 17:08:57,491 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9198, 1.1754, 4.7069, 4.3969, 4.1755, 4.3723, 4.2106, 4.1920],
+ device='cuda:0'), covar=tensor([0.6846, 0.6512, 0.1064, 0.1801, 0.1030, 0.1122, 0.2206, 0.1555],
+ device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0310, 0.0418, 0.0423, 0.0356, 0.0411, 0.0321, 0.0378],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 17:09:14,390 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=27804.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:09:18,622 INFO [finetune.py:976] (0/7) Epoch 5, batch 4900, loss[loss=0.1961, simple_loss=0.2712, pruned_loss=0.06053, over 4841.00 frames. ], tot_loss[loss=0.2136, simple_loss=0.2752, pruned_loss=0.07602, over 950698.77 frames. ], batch size: 49, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:09:27,507 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1006, 1.4705, 1.9623, 2.3152, 1.8596, 1.4644, 1.1681, 1.6812],
+ device='cuda:0'), covar=tensor([0.4010, 0.4567, 0.1895, 0.3403, 0.3768, 0.3443, 0.5894, 0.3475],
+ device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0260, 0.0221, 0.0331, 0.0220, 0.0230, 0.0245, 0.0196],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 17:09:30,230 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.253e+02 1.856e+02 2.309e+02 2.750e+02 8.138e+02, threshold=4.618e+02, percent-clipped=6.0
+2023-04-26 17:09:31,051 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0
+2023-04-26 17:09:46,103 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=27852.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:09:52,540 INFO [finetune.py:976] (0/7) Epoch 5, batch 4950, loss[loss=0.2232, simple_loss=0.2858, pruned_loss=0.08034, over 4914.00 frames. ], tot_loss[loss=0.2135, simple_loss=0.2756, pruned_loss=0.07574, over 953278.03 frames. ], batch size: 38, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:10:05,748 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4661, 1.8007, 1.3259, 0.9800, 1.1734, 1.1314, 1.3027, 1.1588],
+ device='cuda:0'), covar=tensor([0.2095, 0.1404, 0.1966, 0.2329, 0.2886, 0.2268, 0.1463, 0.2320],
+ device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0220, 0.0177, 0.0208, 0.0213, 0.0187, 0.0170, 0.0194],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 17:10:30,556 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5932, 1.4803, 0.8734, 1.2994, 1.6459, 1.4783, 1.3582, 1.4069],
+ device='cuda:0'), covar=tensor([0.0526, 0.0416, 0.0408, 0.0581, 0.0302, 0.0527, 0.0561, 0.0619],
+ device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0031],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0050],
+ device='cuda:0')
+2023-04-26 17:10:31,676 INFO [finetune.py:976] (0/7) Epoch 5, batch 5000, loss[loss=0.2021, simple_loss=0.2675, pruned_loss=0.06834, over 4758.00 frames. ], tot_loss[loss=0.2116, simple_loss=0.2735, pruned_loss=0.07479, over 952986.33 frames. ], batch size: 28, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:10:42,359 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.656e+02 1.985e+02 2.433e+02 4.076e+02, threshold=3.970e+02, percent-clipped=0.0
+2023-04-26 17:10:52,493 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1612, 1.5442, 1.9956, 2.3066, 1.9269, 1.4604, 1.1679, 1.7727],
+ device='cuda:0'), covar=tensor([0.3476, 0.3958, 0.1798, 0.2782, 0.3227, 0.3318, 0.5454, 0.2965],
+ device='cuda:0'), in_proj_covar=tensor([0.0276, 0.0259, 0.0220, 0.0329, 0.0219, 0.0230, 0.0244, 0.0195],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 17:11:00,922 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7268, 1.4103, 1.8675, 1.9109, 1.5261, 1.3018, 1.6169, 0.9022],
+ device='cuda:0'), covar=tensor([0.0597, 0.0945, 0.0609, 0.0903, 0.1033, 0.1625, 0.0757, 0.1264],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0077, 0.0076, 0.0070, 0.0080, 0.0097, 0.0083, 0.0078],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 17:11:04,347 INFO [finetune.py:976] (0/7) Epoch 5, batch 5050, loss[loss=0.1527, simple_loss=0.2143, pruned_loss=0.04554, over 4804.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2712, pruned_loss=0.0748, over 953365.51 frames. ], batch size: 25, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:11:32,680 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0
+2023-04-26 17:11:43,078 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0
+2023-04-26 17:11:52,453 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-28000.pt
+2023-04-26 17:12:05,263 INFO [finetune.py:976] (0/7) Epoch 5, batch 5100, loss[loss=0.2152, simple_loss=0.2656, pruned_loss=0.08244, over 4777.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2674, pruned_loss=0.07364, over 955172.34 frames. ], batch size: 54, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:12:27,655 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.777e+02 2.079e+02 2.348e+02 6.496e+02, threshold=4.158e+02, percent-clipped=5.0
+2023-04-26 17:12:48,108 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1697, 2.4984, 1.0525, 1.4005, 1.8761, 1.3159, 3.3878, 1.8084],
+ device='cuda:0'), covar=tensor([0.0655, 0.0613, 0.0794, 0.1332, 0.0537, 0.1007, 0.0305, 0.0645],
+ device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0070, 0.0052, 0.0049, 0.0054, 0.0054, 0.0082, 0.0053],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008],
+ device='cuda:0')
+2023-04-26 17:13:12,919 INFO [finetune.py:976] (0/7) Epoch 5, batch 5150, loss[loss=0.2638, simple_loss=0.3186, pruned_loss=0.1046, over 4908.00 frames. ], tot_loss[loss=0.2078, simple_loss=0.2676, pruned_loss=0.07404, over 953361.10 frames. ], batch size: 36, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:13:31,480 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28070.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 17:13:57,535 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4920, 3.0738, 0.9236, 1.5086, 2.0809, 1.6045, 4.1512, 1.9709],
+ device='cuda:0'), covar=tensor([0.0678, 0.0823, 0.1074, 0.1501, 0.0657, 0.1043, 0.0294, 0.0698],
+ device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0070, 0.0052, 0.0049, 0.0054, 0.0054, 0.0083, 0.0053],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0008, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008],
+ device='cuda:0')
+2023-04-26 17:14:09,045 INFO [finetune.py:976] (0/7) Epoch 5, batch 5200, loss[loss=0.2595, simple_loss=0.3139, pruned_loss=0.1026, over 4851.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2722, pruned_loss=0.07508, over 955979.67 frames. ], batch size: 31, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:14:19,886 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.281e+02 1.900e+02 2.150e+02 2.501e+02 5.102e+02, threshold=4.301e+02, percent-clipped=1.0
+2023-04-26 17:14:33,702 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-26 17:14:42,339 INFO [finetune.py:976] (0/7) Epoch 5, batch 5250, loss[loss=0.2103, simple_loss=0.2744, pruned_loss=0.07315, over 4902.00 frames. ], tot_loss[loss=0.2126, simple_loss=0.2738, pruned_loss=0.0757, over 954877.82 frames. ], batch size: 36, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:15:16,326 INFO [finetune.py:976] (0/7) Epoch 5, batch 5300, loss[loss=0.211, simple_loss=0.2761, pruned_loss=0.073, over 4798.00 frames. ], tot_loss[loss=0.2137, simple_loss=0.2751, pruned_loss=0.0762, over 955423.44 frames. ], batch size: 29, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:15:27,005 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.419e+02 1.781e+02 2.085e+02 2.383e+02 5.799e+02, threshold=4.171e+02, percent-clipped=2.0
+2023-04-26 17:15:28,337 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28229.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:15:46,188 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28255.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 17:15:49,729 INFO [finetune.py:976] (0/7) Epoch 5, batch 5350, loss[loss=0.2218, simple_loss=0.2733, pruned_loss=0.08517, over 4833.00 frames. ], tot_loss[loss=0.213, simple_loss=0.2745, pruned_loss=0.07578, over 954130.23 frames. ], batch size: 30, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:16:07,257 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28287.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:16:10,010 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28290.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:16:23,679 INFO [finetune.py:976] (0/7) Epoch 5, batch 5400, loss[loss=0.1712, simple_loss=0.2356, pruned_loss=0.05338, over 4921.00 frames. ], tot_loss[loss=0.2092, simple_loss=0.2708, pruned_loss=0.07382, over 954405.78 frames. ], batch size: 33, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:16:27,327 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28316.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 17:16:28,485 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1387, 2.6477, 1.0780, 1.4452, 1.9838, 1.1576, 3.4497, 1.6954],
+ device='cuda:0'), covar=tensor([0.0660, 0.0742, 0.0876, 0.1212, 0.0536, 0.1007, 0.0272, 0.0657],
+ device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0070, 0.0052, 0.0049, 0.0054, 0.0054, 0.0082, 0.0053],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008],
+ device='cuda:0')
+2023-04-26 17:16:34,348 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.689e+02 2.005e+02 2.420e+02 4.760e+02, threshold=4.009e+02, percent-clipped=1.0
+2023-04-26 17:16:45,732 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1272, 1.2669, 1.4322, 1.6165, 1.5871, 1.7711, 1.5418, 1.5327],
+ device='cuda:0'), covar=tensor([0.5738, 0.9065, 0.7543, 0.6761, 0.8243, 1.2224, 0.8812, 0.8445],
+ device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0395, 0.0318, 0.0327, 0.0347, 0.0413, 0.0378, 0.0333],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 17:16:49,200 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28348.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:16:57,452 INFO [finetune.py:976] (0/7) Epoch 5, batch 5450, loss[loss=0.1952, simple_loss=0.261, pruned_loss=0.06471, over 4837.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2683, pruned_loss=0.07302, over 956395.95 frames. ], batch size: 33, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:17:02,978 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7346, 1.9607, 1.2494, 1.5012, 2.0140, 1.6533, 1.5741, 1.6199],
+ device='cuda:0'), covar=tensor([0.0471, 0.0315, 0.0378, 0.0477, 0.0285, 0.0490, 0.0446, 0.0515],
+ device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0031],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0050],
+ device='cuda:0')
+2023-04-26 17:17:03,569 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28370.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 17:17:19,194 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9411, 1.8813, 2.1183, 2.3208, 2.4109, 1.8491, 1.5792, 2.1019],
+ device='cuda:0'), covar=tensor([0.1018, 0.0990, 0.0609, 0.0656, 0.0576, 0.1009, 0.0903, 0.0590],
+ device='cuda:0'), in_proj_covar=tensor([0.0201, 0.0204, 0.0180, 0.0176, 0.0176, 0.0192, 0.0164, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 17:17:37,439 INFO [finetune.py:976] (0/7) Epoch 5, batch 5500, loss[loss=0.2084, simple_loss=0.2646, pruned_loss=0.07616, over 4868.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2655, pruned_loss=0.07253, over 954438.20 frames. ], batch size: 44, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:17:47,031 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=28418.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 17:17:59,138 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.711e+02 2.116e+02 2.529e+02 5.388e+02, threshold=4.231e+02, percent-clipped=4.0
+2023-04-26 17:18:31,814 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0
+2023-04-26 17:18:39,145 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4919, 3.5026, 0.8654, 1.8102, 2.1031, 2.4313, 2.0338, 0.9499],
+ device='cuda:0'), covar=tensor([0.1418, 0.0885, 0.2043, 0.1469, 0.1090, 0.1177, 0.1418, 0.2158],
+ device='cuda:0'), in_proj_covar=tensor([0.0124, 0.0267, 0.0148, 0.0130, 0.0141, 0.0163, 0.0126, 0.0130],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 17:18:49,581 INFO [finetune.py:976] (0/7) Epoch 5, batch 5550, loss[loss=0.243, simple_loss=0.2859, pruned_loss=0.1001, over 4864.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2671, pruned_loss=0.0735, over 954656.34 frames. ], batch size: 31, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:19:09,805 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28486.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:19:14,450 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28492.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:19:15,104 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3314, 1.5713, 1.5818, 1.7206, 1.6284, 1.7627, 1.7076, 1.6963],
+ device='cuda:0'), covar=tensor([0.6645, 1.0899, 0.9030, 0.8042, 0.9643, 1.3964, 1.1120, 0.9446],
+ device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0398, 0.0319, 0.0329, 0.0349, 0.0415, 0.0380, 0.0335],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 17:19:21,808 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0
+2023-04-26 17:19:25,625 INFO [finetune.py:976] (0/7) Epoch 5, batch 5600, loss[loss=0.2447, simple_loss=0.2979, pruned_loss=0.09576, over 4745.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.2707, pruned_loss=0.07447, over 954055.14 frames. ], batch size: 54, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:19:34,905 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.757e+02 2.198e+02 2.682e+02 4.335e+02, threshold=4.395e+02, percent-clipped=1.0
+2023-04-26 17:19:42,437 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1399, 2.5696, 0.9981, 1.5021, 2.0539, 1.3598, 3.5447, 1.9609],
+ device='cuda:0'), covar=tensor([0.0637, 0.0644, 0.0836, 0.1413, 0.0515, 0.0937, 0.0297, 0.0576],
+ device='cuda:0'), in_proj_covar=tensor([0.0054, 0.0070, 0.0052, 0.0049, 0.0054, 0.0054, 0.0082, 0.0053],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008],
+ device='cuda:0')
+2023-04-26 17:19:47,083 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28547.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:19:51,018 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28553.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:19:55,612 INFO [finetune.py:976] (0/7) Epoch 5, batch 5650, loss[loss=0.1719, simple_loss=0.2486, pruned_loss=0.04758, over 4838.00 frames. ], tot_loss[loss=0.2114, simple_loss=0.2732, pruned_loss=0.07482, over 954705.91 frames. ], batch size: 47, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:20:04,573 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0
+2023-04-26 17:20:09,690 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28585.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:20:25,136 INFO [finetune.py:976] (0/7) Epoch 5, batch 5700, loss[loss=0.2006, simple_loss=0.2543, pruned_loss=0.07344, over 3995.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2695, pruned_loss=0.07405, over 940020.74 frames. ], batch size: 17, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:20:25,177 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28611.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 17:20:34,245 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-26 17:20:34,602 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.668e+02 2.021e+02 2.528e+02 3.624e+02, threshold=4.042e+02, percent-clipped=0.0
+2023-04-26 17:20:41,848 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-5.pt
+2023-04-26 17:20:58,934 INFO [finetune.py:976] (0/7) Epoch 6, batch 0, loss[loss=0.2145, simple_loss=0.2792, pruned_loss=0.07496, over 4912.00 frames. ], tot_loss[loss=0.2145, simple_loss=0.2792, pruned_loss=0.07496, over 4912.00 frames. ], batch size: 33, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:20:58,935 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-26 17:21:02,064 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1777, 1.6265, 1.4028, 1.8116, 1.5708, 1.9139, 1.3986, 3.0214],
+ device='cuda:0'), covar=tensor([0.0691, 0.0787, 0.0768, 0.1164, 0.0653, 0.0453, 0.0720, 0.0199],
+ device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0040, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 17:21:13,393 INFO [finetune.py:1010] (0/7) Epoch 6, validation: loss=0.1605, simple_loss=0.2337, pruned_loss=0.04366, over 2265189.00 frames.
+2023-04-26 17:21:13,394 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB
+2023-04-26 17:21:19,202 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28643.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:21:19,247 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5492, 1.5212, 1.7108, 1.9110, 1.9958, 1.5101, 1.0782, 1.6667],
+ device='cuda:0'), covar=tensor([0.0999, 0.1166, 0.0751, 0.0698, 0.0621, 0.1047, 0.1113, 0.0647],
+ device='cuda:0'), in_proj_covar=tensor([0.0202, 0.0204, 0.0181, 0.0177, 0.0177, 0.0192, 0.0165, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 17:21:21,106 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3138, 2.2734, 2.5838, 2.8100, 2.7695, 2.1568, 1.8339, 2.3393],
+ device='cuda:0'), covar=tensor([0.0994, 0.0944, 0.0506, 0.0656, 0.0577, 0.1063, 0.0976, 0.0610],
+ device='cuda:0'), in_proj_covar=tensor([0.0202, 0.0204, 0.0181, 0.0177, 0.0177, 0.0192, 0.0165, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 17:21:29,874 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6880, 1.1506, 1.2483, 1.4003, 1.9269, 1.5473, 1.2513, 1.1944],
+ device='cuda:0'), covar=tensor([0.1749, 0.1858, 0.2570, 0.1783, 0.1069, 0.1759, 0.2192, 0.2501],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0331, 0.0349, 0.0305, 0.0341, 0.0336, 0.0305, 0.0351],
+ device='cuda:0'), out_proj_covar=tensor([6.6652e-05, 7.0850e-05, 7.5417e-05, 6.3753e-05, 7.2213e-05, 7.2886e-05,
+ 6.6233e-05, 7.5610e-05], device='cuda:0')
+2023-04-26 17:21:59,788 INFO [finetune.py:976] (0/7) Epoch 6, batch 50, loss[loss=0.2472, simple_loss=0.303, pruned_loss=0.09572, over 4875.00 frames. ], tot_loss[loss=0.2183, simple_loss=0.2769, pruned_loss=0.07982, over 216273.13 frames. ], batch size: 32, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:22:24,839 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.869e+02 2.241e+02 2.636e+02 5.025e+02, threshold=4.483e+02, percent-clipped=7.0
+2023-04-26 17:22:33,639 INFO [finetune.py:976] (0/7) Epoch 6, batch 100, loss[loss=0.1815, simple_loss=0.2444, pruned_loss=0.05926, over 4909.00 frames. ], tot_loss[loss=0.2125, simple_loss=0.2713, pruned_loss=0.0768, over 380334.85 frames. ], batch size: 36, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:23:05,743 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1267, 1.3690, 1.2588, 1.6638, 1.4381, 1.6013, 1.2768, 2.4925],
+ device='cuda:0'), covar=tensor([0.0644, 0.0821, 0.0832, 0.1185, 0.0655, 0.0512, 0.0727, 0.0215],
+ device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 17:23:06,830 INFO [finetune.py:976] (0/7) Epoch 6, batch 150, loss[loss=0.1917, simple_loss=0.2552, pruned_loss=0.06404, over 4879.00 frames. ], tot_loss[loss=0.2079, simple_loss=0.2659, pruned_loss=0.07496, over 507437.93 frames. ], batch size: 34, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:23:36,887 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.735e+02 2.162e+02 2.514e+02 4.413e+02, threshold=4.325e+02, percent-clipped=0.0
+2023-04-26 17:23:57,270 INFO [finetune.py:976] (0/7) Epoch 6, batch 200, loss[loss=0.2083, simple_loss=0.2637, pruned_loss=0.07641, over 4814.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2639, pruned_loss=0.07431, over 604198.43 frames. ], batch size: 25, lr: 3.92e-03, grad_scale: 32.0
+2023-04-26 17:23:59,203 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28842.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:24:08,157 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=28848.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:24:22,299 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=28861.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:24:55,331 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28885.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:25:03,860 INFO [finetune.py:976] (0/7) Epoch 6, batch 250, loss[loss=0.1961, simple_loss=0.2707, pruned_loss=0.06071, over 4718.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.269, pruned_loss=0.07595, over 681390.05 frames. ], batch size: 59, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:25:15,811 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4920, 3.3788, 2.6782, 3.0784, 2.4542, 2.8453, 2.9002, 2.5797],
+ device='cuda:0'), covar=tensor([0.2622, 0.1916, 0.0955, 0.1502, 0.2984, 0.1548, 0.2058, 0.3058],
+ device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0323, 0.0235, 0.0298, 0.0317, 0.0278, 0.0266, 0.0290],
+ device='cuda:0'), out_proj_covar=tensor([1.2401e-04, 1.3126e-04, 9.5598e-05, 1.1960e-04, 1.3056e-04, 1.1257e-04,
+ 1.0937e-04, 1.1682e-04], device='cuda:0')
+2023-04-26 17:25:18,978 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-26 17:25:29,049 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28911.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 17:25:40,820 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=28922.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:25:46,465 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.358e+02 1.791e+02 2.229e+02 2.957e+02 8.157e+02, threshold=4.458e+02, percent-clipped=7.0
+2023-04-26 17:25:54,886 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=28933.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:25:59,500 INFO [finetune.py:976] (0/7) Epoch 6, batch 300, loss[loss=0.2363, simple_loss=0.294, pruned_loss=0.0893, over 4933.00 frames. ], tot_loss[loss=0.2151, simple_loss=0.2746, pruned_loss=0.07775, over 742259.89 frames. ], batch size: 33, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:26:08,041 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=28943.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:26:28,688 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=28959.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 17:27:05,111 INFO [finetune.py:976] (0/7) Epoch 6, batch 350, loss[loss=0.2294, simple_loss=0.2806, pruned_loss=0.08909, over 4791.00 frames. ], tot_loss[loss=0.2159, simple_loss=0.2764, pruned_loss=0.07766, over 789333.86 frames. ], batch size: 51, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:27:11,873 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=28991.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:27:24,956 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29002.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:27:53,123 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.828e+02 2.192e+02 2.525e+02 3.973e+02, threshold=4.385e+02, percent-clipped=0.0
+2023-04-26 17:27:59,802 INFO [finetune.py:976] (0/7) Epoch 6, batch 400, loss[loss=0.2359, simple_loss=0.2873, pruned_loss=0.0922, over 4844.00 frames. ], tot_loss[loss=0.2143, simple_loss=0.2756, pruned_loss=0.07647, over 825625.90 frames. ], batch size: 30, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:28:01,668 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0
+2023-04-26 17:28:16,902 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29063.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:28:32,158 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.33 vs. limit=5.0
+2023-04-26 17:28:33,175 INFO [finetune.py:976] (0/7) Epoch 6, batch 450, loss[loss=0.2027, simple_loss=0.2613, pruned_loss=0.07208, over 4726.00 frames. ], tot_loss[loss=0.2103, simple_loss=0.2721, pruned_loss=0.07424, over 854262.50 frames. ], batch size: 54, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:28:59,673 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.667e+02 1.998e+02 2.325e+02 3.966e+02, threshold=3.996e+02, percent-clipped=0.0
+2023-04-26 17:29:06,385 INFO [finetune.py:976] (0/7) Epoch 6, batch 500, loss[loss=0.1999, simple_loss=0.258, pruned_loss=0.07084, over 4709.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2686, pruned_loss=0.07253, over 877381.83 frames. ], batch size: 23, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:29:08,309 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29142.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:29:13,383 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29148.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:29:37,376 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5996, 1.1814, 1.2632, 1.3377, 1.8912, 1.4530, 1.1316, 1.1867],
+ device='cuda:0'), covar=tensor([0.1365, 0.1520, 0.1805, 0.1315, 0.0685, 0.1610, 0.2306, 0.1921],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0328, 0.0347, 0.0304, 0.0338, 0.0333, 0.0304, 0.0347],
+ device='cuda:0'), out_proj_covar=tensor([6.6162e-05, 6.9969e-05, 7.4965e-05, 6.3463e-05, 7.1518e-05, 7.2190e-05,
+ 6.5961e-05, 7.4830e-05], device='cuda:0')
+2023-04-26 17:29:39,682 INFO [finetune.py:976] (0/7) Epoch 6, batch 550, loss[loss=0.2508, simple_loss=0.3093, pruned_loss=0.09612, over 4805.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.2658, pruned_loss=0.07159, over 896416.50 frames. ], batch size: 45, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:29:40,347 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=29190.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:29:44,456 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=29196.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:29:59,117 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29217.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:30:06,230 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.833e+02 2.160e+02 2.601e+02 5.201e+02, threshold=4.320e+02, percent-clipped=1.0
+2023-04-26 17:30:11,173 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29236.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:30:12,891 INFO [finetune.py:976] (0/7) Epoch 6, batch 600, loss[loss=0.2057, simple_loss=0.2675, pruned_loss=0.07192, over 4912.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2679, pruned_loss=0.07272, over 911223.62 frames. ], batch size: 37, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:30:24,742 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0
+2023-04-26 17:30:29,548 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5339, 1.5054, 4.1616, 3.8967, 3.6869, 3.9205, 3.9977, 3.6230],
+ device='cuda:0'), covar=tensor([0.7114, 0.5457, 0.1148, 0.1918, 0.1131, 0.1435, 0.1288, 0.1599],
+ device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0309, 0.0421, 0.0425, 0.0358, 0.0415, 0.0322, 0.0379],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 17:30:36,782 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2824, 2.7071, 1.0512, 1.4550, 2.0164, 1.2579, 3.7001, 1.8649],
+ device='cuda:0'), covar=tensor([0.0637, 0.0737, 0.0822, 0.1298, 0.0585, 0.1046, 0.0363, 0.0631],
+ device='cuda:0'), in_proj_covar=tensor([0.0053, 0.0070, 0.0051, 0.0049, 0.0053, 0.0054, 0.0081, 0.0052],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008],
+ device='cuda:0')
+2023-04-26 17:30:47,030 INFO [finetune.py:976] (0/7) Epoch 6, batch 650, loss[loss=0.1513, simple_loss=0.2198, pruned_loss=0.04138, over 4773.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.2706, pruned_loss=0.07386, over 921013.10 frames. ], batch size: 27, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:30:57,010 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3444, 1.7511, 1.3171, 1.9879, 1.6501, 2.1869, 1.4240, 4.2977],
+ device='cuda:0'), covar=tensor([0.0651, 0.0750, 0.0828, 0.1194, 0.0665, 0.0633, 0.0784, 0.0120],
+ device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0040, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 17:30:57,645 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29297.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:30:59,451 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.3435, 1.4090, 1.4123, 1.0227, 1.3330, 1.2208, 1.7798, 1.1629],
+ device='cuda:0'), covar=tensor([0.3715, 0.1610, 0.5025, 0.2671, 0.1671, 0.2257, 0.1662, 0.4898],
+ device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0359, 0.0441, 0.0371, 0.0399, 0.0389, 0.0394, 0.0426],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 17:31:42,443 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.237e+02 1.829e+02 2.148e+02 2.649e+02 3.979e+02, threshold=4.296e+02, percent-clipped=0.0
+2023-04-26 17:32:01,094 INFO [finetune.py:976] (0/7) Epoch 6, batch 700, loss[loss=0.2139, simple_loss=0.2805, pruned_loss=0.07369, over 4860.00 frames. ], tot_loss[loss=0.2091, simple_loss=0.271, pruned_loss=0.07362, over 927242.08 frames. ], batch size: 34, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:32:25,027 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29358.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:33:07,833 INFO [finetune.py:976] (0/7) Epoch 6, batch 750, loss[loss=0.2588, simple_loss=0.3058, pruned_loss=0.1059, over 4886.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2713, pruned_loss=0.0737, over 932666.91 frames. ], batch size: 32, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:34:03,539 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.229e+02 1.738e+02 2.111e+02 2.599e+02 4.246e+02, threshold=4.221e+02, percent-clipped=0.0
+2023-04-26 17:34:14,519 INFO [finetune.py:976] (0/7) Epoch 6, batch 800, loss[loss=0.2101, simple_loss=0.2709, pruned_loss=0.07464, over 4849.00 frames. ], tot_loss[loss=0.2098, simple_loss=0.272, pruned_loss=0.0738, over 938692.79 frames. ], batch size: 44, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:34:29,661 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6581, 2.1256, 1.6479, 1.5539, 1.3069, 1.3111, 1.6355, 1.2716],
+ device='cuda:0'), covar=tensor([0.2055, 0.1687, 0.1886, 0.2110, 0.2888, 0.2278, 0.1372, 0.2325],
+ device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0220, 0.0176, 0.0207, 0.0213, 0.0186, 0.0169, 0.0193],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 17:34:30,237 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3620, 1.8391, 1.5377, 2.1352, 1.7524, 2.0695, 1.5792, 4.4662],
+ device='cuda:0'), covar=tensor([0.0680, 0.0765, 0.0850, 0.1225, 0.0699, 0.0609, 0.0752, 0.0124],
+ device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0040, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0062],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 17:34:48,531 INFO [finetune.py:976] (0/7) Epoch 6, batch 850, loss[loss=0.181, simple_loss=0.2528, pruned_loss=0.05463, over 4783.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.2712, pruned_loss=0.07391, over 944062.68 frames. ], batch size: 26, lr: 3.92e-03, grad_scale: 16.0
+2023-04-26 17:35:08,005 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29517.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:35:14,512 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.686e+02 1.967e+02 2.412e+02 4.596e+02, threshold=3.934e+02, percent-clipped=4.0
+2023-04-26 17:35:22,136 INFO [finetune.py:976] (0/7) Epoch 6, batch 900, loss[loss=0.2105, simple_loss=0.2665, pruned_loss=0.07724, over 4844.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2671, pruned_loss=0.07208, over 943510.38 frames. ], batch size: 44, lr: 3.91e-03, grad_scale: 16.0
+2023-04-26 17:35:30,021 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29551.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:35:38,974 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=29565.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:35:56,021 INFO [finetune.py:976] (0/7) Epoch 6, batch 950, loss[loss=0.2287, simple_loss=0.297, pruned_loss=0.08019, over 4810.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.2653, pruned_loss=0.07157, over 945678.91 frames. ], batch size: 41, lr: 3.91e-03, grad_scale: 16.0
+2023-04-26 17:35:57,874 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29592.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:36:10,609 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29612.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:36:15,226 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0
+2023-04-26 17:36:27,225 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.864e+02 2.273e+02 2.659e+02 4.416e+02, threshold=4.547e+02, percent-clipped=3.0
+2023-04-26 17:36:39,506 INFO [finetune.py:976] (0/7) Epoch 6, batch 1000, loss[loss=0.2112, simple_loss=0.2613, pruned_loss=0.08057, over 4716.00 frames. ], tot_loss[loss=0.2057, simple_loss=0.2672, pruned_loss=0.0721, over 948147.12 frames. ], batch size: 23, lr: 3.91e-03, grad_scale: 16.0
+2023-04-26 17:37:09,264 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29658.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:37:11,698 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29662.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:37:52,136 INFO [finetune.py:976] (0/7) Epoch 6, batch 1050, loss[loss=0.1917, simple_loss=0.2531, pruned_loss=0.06518, over 4211.00 frames. ], tot_loss[loss=0.2075, simple_loss=0.2694, pruned_loss=0.07283, over 949568.71 frames. ], batch size: 66, lr: 3.91e-03, grad_scale: 16.0
+2023-04-26 17:38:13,506 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=29706.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:38:34,977 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29723.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 17:38:39,286 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.744e+02 2.054e+02 2.475e+02 7.840e+02, threshold=4.108e+02, percent-clipped=2.0
+2023-04-26 17:38:57,681 INFO [finetune.py:976] (0/7) Epoch 6, batch 1100, loss[loss=0.1715, simple_loss=0.2348, pruned_loss=0.05409, over 4828.00 frames. ], tot_loss[loss=0.2093, simple_loss=0.2715, pruned_loss=0.07348, over 949942.84 frames. ], batch size: 30, lr: 3.91e-03, grad_scale: 16.0
+2023-04-26 17:39:08,771 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.88 vs. limit=5.0
+2023-04-26 17:39:12,428 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-04-26 17:39:46,376 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0
+2023-04-26 17:39:58,707 INFO [finetune.py:976] (0/7) Epoch 6, batch 1150, loss[loss=0.225, simple_loss=0.297, pruned_loss=0.07649, over 4896.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2729, pruned_loss=0.07371, over 951164.03 frames. ], batch size: 46, lr: 3.91e-03, grad_scale: 16.0
+2023-04-26 17:40:07,756 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=29802.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:40:22,249 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6051, 1.8087, 1.8234, 1.2457, 1.7032, 1.4601, 2.3083, 1.4000],
+ device='cuda:0'), covar=tensor([0.3907, 0.1687, 0.4313, 0.2974, 0.1993, 0.2596, 0.1545, 0.4668],
+ device='cuda:0'), in_proj_covar=tensor([0.0352, 0.0359, 0.0441, 0.0371, 0.0399, 0.0389, 0.0395, 0.0425],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 17:40:24,363 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.784e+02 2.105e+02 2.513e+02 8.942e+02, threshold=4.210e+02, percent-clipped=4.0
+2023-04-26 17:40:31,967 INFO [finetune.py:976] (0/7) Epoch 6, batch 1200, loss[loss=0.1718, simple_loss=0.2467, pruned_loss=0.04845, over 4798.00 frames. ], tot_loss[loss=0.2082, simple_loss=0.2707, pruned_loss=0.07287, over 953270.14 frames. ], batch size: 45, lr: 3.91e-03, grad_scale: 16.0
+2023-04-26 17:40:48,151 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=29863.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 17:41:05,314 INFO [finetune.py:976] (0/7) Epoch 6, batch 1250, loss[loss=0.1906, simple_loss=0.2588, pruned_loss=0.0612, over 4902.00 frames.
], tot_loss[loss=0.2068, simple_loss=0.2686, pruned_loss=0.07251, over 954847.40 frames. ], batch size: 36, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:41:07,186 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=29892.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:41:17,256 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=29907.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:41:21,675 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.77 vs. limit=5.0 +2023-04-26 17:41:30,521 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.701e+02 1.937e+02 2.233e+02 5.324e+02, threshold=3.875e+02, percent-clipped=2.0 +2023-04-26 17:41:38,729 INFO [finetune.py:976] (0/7) Epoch 6, batch 1300, loss[loss=0.2122, simple_loss=0.2615, pruned_loss=0.08145, over 4851.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2662, pruned_loss=0.07219, over 954178.25 frames. ], batch size: 49, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:41:39,345 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=29940.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:41:39,417 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8468, 1.2263, 1.4068, 1.4808, 2.0925, 1.6487, 1.3329, 1.3686], + device='cuda:0'), covar=tensor([0.1622, 0.1936, 0.2281, 0.1767, 0.0987, 0.1969, 0.2542, 0.2168], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0332, 0.0351, 0.0307, 0.0341, 0.0335, 0.0307, 0.0352], + device='cuda:0'), out_proj_covar=tensor([6.6690e-05, 7.0973e-05, 7.6135e-05, 6.4139e-05, 7.2098e-05, 7.2674e-05, + 6.6456e-05, 7.5777e-05], device='cuda:0') +2023-04-26 17:42:04,094 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-26 17:42:12,108 INFO [finetune.py:976] (0/7) Epoch 6, batch 1350, loss[loss=0.2722, simple_loss=0.3217, pruned_loss=0.1114, over 4819.00 frames. ], tot_loss[loss=0.205, simple_loss=0.266, pruned_loss=0.07195, over 954791.19 frames. ], batch size: 39, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:42:19,974 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-30000.pt +2023-04-26 17:42:32,664 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30018.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 17:42:39,145 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.856e+02 2.103e+02 2.497e+02 6.080e+02, threshold=4.207e+02, percent-clipped=3.0 +2023-04-26 17:42:52,096 INFO [finetune.py:976] (0/7) Epoch 6, batch 1400, loss[loss=0.2147, simple_loss=0.2954, pruned_loss=0.06697, over 4827.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2706, pruned_loss=0.07371, over 954618.45 frames. ], batch size: 30, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:43:02,264 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-26 17:43:56,758 INFO [finetune.py:976] (0/7) Epoch 6, batch 1450, loss[loss=0.23, simple_loss=0.2917, pruned_loss=0.08412, over 4904.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2712, pruned_loss=0.07316, over 955226.96 frames. 
], batch size: 32, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:44:42,749 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30125.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:44:44,462 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.421e+02 1.906e+02 2.234e+02 2.642e+02 4.905e+02, threshold=4.468e+02, percent-clipped=1.0 +2023-04-26 17:44:57,143 INFO [finetune.py:976] (0/7) Epoch 6, batch 1500, loss[loss=0.1978, simple_loss=0.268, pruned_loss=0.06383, over 4837.00 frames. ], tot_loss[loss=0.212, simple_loss=0.2741, pruned_loss=0.07498, over 953077.13 frames. ], batch size: 30, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:44:57,306 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7862, 1.5568, 1.9587, 2.1701, 1.9094, 1.7155, 1.8494, 1.8987], + device='cuda:0'), covar=tensor([0.9577, 1.2897, 1.3940, 1.3937, 1.1437, 1.6736, 1.6616, 1.3319], + device='cuda:0'), in_proj_covar=tensor([0.0416, 0.0445, 0.0529, 0.0552, 0.0446, 0.0467, 0.0478, 0.0478], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 17:45:25,793 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30158.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:45:59,232 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30186.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:46:00,953 INFO [finetune.py:976] (0/7) Epoch 6, batch 1550, loss[loss=0.1671, simple_loss=0.2287, pruned_loss=0.05275, over 4764.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2734, pruned_loss=0.07425, over 954276.78 frames. ], batch size: 27, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:46:30,717 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30207.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:46:31,956 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30209.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:46:56,314 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.842e+02 2.143e+02 2.537e+02 6.404e+02, threshold=4.287e+02, percent-clipped=2.0 +2023-04-26 17:47:09,505 INFO [finetune.py:976] (0/7) Epoch 6, batch 1600, loss[loss=0.1764, simple_loss=0.2311, pruned_loss=0.06084, over 4199.00 frames. ], tot_loss[loss=0.2096, simple_loss=0.2715, pruned_loss=0.07386, over 955820.33 frames. ], batch size: 66, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:47:31,236 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=30255.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:47:37,944 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. 
limit=2.0 +2023-04-26 17:47:41,428 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30270.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:47:46,942 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0512, 2.0202, 2.2988, 2.5801, 2.4758, 2.0101, 1.7412, 2.1317], + device='cuda:0'), covar=tensor([0.1165, 0.1123, 0.0628, 0.0700, 0.0726, 0.1090, 0.1040, 0.0675], + device='cuda:0'), in_proj_covar=tensor([0.0202, 0.0205, 0.0181, 0.0178, 0.0179, 0.0194, 0.0165, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 17:47:53,334 INFO [finetune.py:976] (0/7) Epoch 6, batch 1650, loss[loss=0.145, simple_loss=0.2136, pruned_loss=0.03822, over 4774.00 frames. ], tot_loss[loss=0.2071, simple_loss=0.2685, pruned_loss=0.07287, over 956629.11 frames. ], batch size: 28, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:48:13,369 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30318.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 17:48:16,459 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6351, 1.8690, 1.7511, 1.9664, 1.7418, 1.9853, 1.8422, 1.8310], + device='cuda:0'), covar=tensor([0.7165, 1.1784, 0.9744, 0.7828, 0.9909, 1.3265, 1.1387, 1.0661], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0396, 0.0318, 0.0328, 0.0347, 0.0413, 0.0377, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 17:48:19,322 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.688e+02 2.054e+02 2.477e+02 3.898e+02, threshold=4.108e+02, percent-clipped=0.0 +2023-04-26 17:48:26,038 INFO [finetune.py:976] (0/7) Epoch 6, batch 1700, loss[loss=0.2172, simple_loss=0.2883, pruned_loss=0.07307, over 4863.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2658, pruned_loss=0.07205, over 955594.39 frames. ], batch size: 49, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:48:26,396 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0 +2023-04-26 17:48:51,274 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30360.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:49:00,632 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=30366.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:49:26,051 INFO [finetune.py:976] (0/7) Epoch 6, batch 1750, loss[loss=0.2482, simple_loss=0.3147, pruned_loss=0.09088, over 4822.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.268, pruned_loss=0.07281, over 955221.65 frames. 
], batch size: 51, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:49:37,073 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1582, 1.5214, 1.2780, 1.6889, 1.4731, 2.0257, 1.3353, 3.4072], + device='cuda:0'), covar=tensor([0.0642, 0.0777, 0.0810, 0.1159, 0.0640, 0.0546, 0.0790, 0.0147], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 17:50:03,350 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30421.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:50:08,042 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.357e+02 1.827e+02 2.242e+02 2.631e+02 5.410e+02, threshold=4.483e+02, percent-clipped=4.0 +2023-04-26 17:50:20,187 INFO [finetune.py:976] (0/7) Epoch 6, batch 1800, loss[loss=0.2371, simple_loss=0.2919, pruned_loss=0.0911, over 4295.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2709, pruned_loss=0.07321, over 955913.11 frames. ], batch size: 65, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:50:20,941 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7500, 1.9255, 1.8743, 2.0716, 1.7454, 2.0517, 1.9517, 1.9614], + device='cuda:0'), covar=tensor([0.6212, 1.0937, 0.8864, 0.7415, 0.9227, 1.2840, 1.1549, 0.9916], + device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0393, 0.0316, 0.0326, 0.0344, 0.0411, 0.0373, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 17:50:43,033 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30458.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:51:02,720 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0027, 1.4470, 1.3258, 1.6920, 1.4938, 1.6016, 1.3485, 2.4910], + device='cuda:0'), covar=tensor([0.0654, 0.0843, 0.0806, 0.1222, 0.0675, 0.0495, 0.0793, 0.0221], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 17:51:09,062 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30481.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:51:13,897 INFO [finetune.py:976] (0/7) Epoch 6, batch 1850, loss[loss=0.2271, simple_loss=0.2915, pruned_loss=0.08132, over 4906.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2718, pruned_loss=0.07404, over 954556.50 frames. 
], batch size: 36, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:51:15,220 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0392, 2.8914, 2.3313, 2.6277, 2.0578, 2.4643, 2.5686, 1.9997], + device='cuda:0'), covar=tensor([0.2500, 0.1507, 0.0984, 0.1454, 0.3058, 0.1467, 0.2013, 0.2759], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0324, 0.0235, 0.0297, 0.0317, 0.0279, 0.0264, 0.0290], + device='cuda:0'), out_proj_covar=tensor([1.2338e-04, 1.3155e-04, 9.5370e-05, 1.1925e-04, 1.3038e-04, 1.1276e-04, + 1.0851e-04, 1.1663e-04], device='cuda:0') +2023-04-26 17:51:24,866 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=30506.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:51:40,049 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.288e+02 1.791e+02 2.106e+02 2.463e+02 4.581e+02, threshold=4.213e+02, percent-clipped=1.0 +2023-04-26 17:51:46,806 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-26 17:51:47,205 INFO [finetune.py:976] (0/7) Epoch 6, batch 1900, loss[loss=0.222, simple_loss=0.2753, pruned_loss=0.08438, over 4262.00 frames. ], tot_loss[loss=0.2109, simple_loss=0.2733, pruned_loss=0.07421, over 954359.77 frames. ], batch size: 66, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:51:48,010 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.74 vs. limit=5.0 +2023-04-26 17:51:56,463 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3745, 3.4366, 1.0411, 1.7504, 1.8177, 2.4895, 1.9735, 1.0750], + device='cuda:0'), covar=tensor([0.1562, 0.1381, 0.2169, 0.1543, 0.1235, 0.1199, 0.1637, 0.2247], + device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0259, 0.0146, 0.0127, 0.0137, 0.0159, 0.0122, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 17:52:17,566 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30565.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:52:24,659 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0711, 1.4574, 1.9119, 1.9800, 1.7902, 1.4278, 0.9738, 1.5032], + device='cuda:0'), covar=tensor([0.3949, 0.4430, 0.2036, 0.3327, 0.3588, 0.3341, 0.5689, 0.3226], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0258, 0.0220, 0.0328, 0.0218, 0.0230, 0.0243, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 17:52:34,610 INFO [finetune.py:976] (0/7) Epoch 6, batch 1950, loss[loss=0.2367, simple_loss=0.3109, pruned_loss=0.08122, over 4847.00 frames. ], tot_loss[loss=0.21, simple_loss=0.272, pruned_loss=0.074, over 954205.56 frames. ], batch size: 47, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:53:00,332 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.660e+02 1.977e+02 2.493e+02 4.247e+02, threshold=3.954e+02, percent-clipped=1.0 +2023-04-26 17:53:04,700 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-26 17:53:08,042 INFO [finetune.py:976] (0/7) Epoch 6, batch 2000, loss[loss=0.2222, simple_loss=0.2819, pruned_loss=0.08124, over 4888.00 frames. ], tot_loss[loss=0.2086, simple_loss=0.2697, pruned_loss=0.07372, over 953905.43 frames. 
], batch size: 35, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:53:10,535 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1196, 1.6959, 1.4468, 1.8518, 1.6180, 2.1051, 1.4192, 3.5986], + device='cuda:0'), covar=tensor([0.0697, 0.0742, 0.0790, 0.1166, 0.0649, 0.0499, 0.0740, 0.0148], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 17:53:17,870 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30655.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:53:41,351 INFO [finetune.py:976] (0/7) Epoch 6, batch 2050, loss[loss=0.1897, simple_loss=0.2566, pruned_loss=0.06145, over 4787.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2667, pruned_loss=0.07174, over 955804.12 frames. ], batch size: 25, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:53:58,923 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=30716.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:53:58,982 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30716.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 17:54:12,372 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.673e+02 1.948e+02 2.190e+02 5.524e+02, threshold=3.896e+02, percent-clipped=3.0 +2023-04-26 17:54:12,545 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8561, 1.5739, 2.1075, 2.2508, 1.9983, 1.7713, 1.8676, 1.9985], + device='cuda:0'), covar=tensor([0.9956, 1.3507, 1.4077, 1.4267, 1.1441, 1.6737, 1.7269, 1.3727], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0445, 0.0529, 0.0551, 0.0446, 0.0466, 0.0480, 0.0479], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 17:54:25,816 INFO [finetune.py:976] (0/7) Epoch 6, batch 2100, loss[loss=0.2203, simple_loss=0.275, pruned_loss=0.08285, over 4916.00 frames. ], tot_loss[loss=0.2033, simple_loss=0.2648, pruned_loss=0.07092, over 955423.27 frames. 
], batch size: 36, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:54:43,277 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6109, 3.5597, 0.7079, 1.8507, 1.9247, 2.4577, 2.0569, 1.0270], + device='cuda:0'), covar=tensor([0.1438, 0.0957, 0.2341, 0.1441, 0.1173, 0.1148, 0.1483, 0.2117], + device='cuda:0'), in_proj_covar=tensor([0.0122, 0.0262, 0.0147, 0.0128, 0.0138, 0.0160, 0.0123, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 17:54:45,752 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6197, 1.1813, 1.2724, 1.3589, 1.9171, 1.5391, 1.1810, 1.1769], + device='cuda:0'), covar=tensor([0.1728, 0.1596, 0.2045, 0.1453, 0.0745, 0.1618, 0.2351, 0.2153], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0332, 0.0350, 0.0307, 0.0341, 0.0336, 0.0309, 0.0352], + device='cuda:0'), out_proj_covar=tensor([6.6640e-05, 7.1038e-05, 7.5830e-05, 6.4009e-05, 7.1918e-05, 7.2661e-05, + 6.7076e-05, 7.5829e-05], device='cuda:0') +2023-04-26 17:54:54,242 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30781.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:54:59,003 INFO [finetune.py:976] (0/7) Epoch 6, batch 2150, loss[loss=0.1615, simple_loss=0.2391, pruned_loss=0.04199, over 4748.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2694, pruned_loss=0.07301, over 955390.02 frames. ], batch size: 27, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:55:40,195 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.783e+02 2.191e+02 2.615e+02 4.573e+02, threshold=4.381e+02, percent-clipped=3.0 +2023-04-26 17:55:40,857 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=30829.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:55:53,208 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8050, 3.8655, 2.6921, 4.4681, 3.8787, 3.8914, 1.7548, 3.7676], + device='cuda:0'), covar=tensor([0.1641, 0.1041, 0.3279, 0.1662, 0.3141, 0.1827, 0.5947, 0.2464], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0220, 0.0256, 0.0315, 0.0304, 0.0256, 0.0277, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 17:55:54,381 INFO [finetune.py:976] (0/7) Epoch 6, batch 2200, loss[loss=0.208, simple_loss=0.2709, pruned_loss=0.07257, over 4918.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.271, pruned_loss=0.07317, over 956233.32 frames. 
], batch size: 38, lr: 3.91e-03, grad_scale: 16.0 +2023-04-26 17:56:23,811 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=30865.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:56:31,449 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6525, 3.6109, 0.9502, 1.9552, 2.0340, 2.5132, 2.1434, 0.9122], + device='cuda:0'), covar=tensor([0.1398, 0.0956, 0.2033, 0.1295, 0.1067, 0.1091, 0.1391, 0.2172], + device='cuda:0'), in_proj_covar=tensor([0.0123, 0.0262, 0.0147, 0.0128, 0.0138, 0.0161, 0.0123, 0.0127], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 17:56:33,286 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0764, 2.4092, 0.9822, 1.3262, 2.0041, 1.3439, 3.0737, 1.7054], + device='cuda:0'), covar=tensor([0.0640, 0.0566, 0.0749, 0.1229, 0.0445, 0.0972, 0.0268, 0.0616], + device='cuda:0'), in_proj_covar=tensor([0.0053, 0.0069, 0.0051, 0.0048, 0.0053, 0.0054, 0.0081, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 17:56:51,897 INFO [finetune.py:976] (0/7) Epoch 6, batch 2250, loss[loss=0.2142, simple_loss=0.2721, pruned_loss=0.07818, over 4773.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2714, pruned_loss=0.07255, over 958187.35 frames. ], batch size: 27, lr: 3.91e-03, grad_scale: 32.0 +2023-04-26 17:57:11,913 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30903.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:57:22,851 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=30913.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:57:35,495 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30925.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:57:41,994 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3363, 3.3219, 2.3774, 3.8781, 3.3340, 3.3506, 1.2476, 3.3151], + device='cuda:0'), covar=tensor([0.1904, 0.1398, 0.3286, 0.2428, 0.3134, 0.2111, 0.5912, 0.2455], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0220, 0.0254, 0.0313, 0.0302, 0.0255, 0.0275, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 17:57:42,530 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.673e+02 1.940e+02 2.404e+02 3.842e+02, threshold=3.880e+02, percent-clipped=0.0 +2023-04-26 17:57:46,822 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=30934.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:57:50,253 INFO [finetune.py:976] (0/7) Epoch 6, batch 2300, loss[loss=0.1997, simple_loss=0.2688, pruned_loss=0.06529, over 4824.00 frames. ], tot_loss[loss=0.2064, simple_loss=0.27, pruned_loss=0.07137, over 957590.81 frames. 
], batch size: 38, lr: 3.91e-03, grad_scale: 32.0 +2023-04-26 17:58:07,553 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30964.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:58:14,179 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5969, 1.0371, 1.3634, 1.1780, 1.7336, 1.4493, 1.1632, 1.2872], + device='cuda:0'), covar=tensor([0.1900, 0.1650, 0.2185, 0.1640, 0.1107, 0.1529, 0.2006, 0.2365], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0331, 0.0349, 0.0305, 0.0340, 0.0335, 0.0308, 0.0351], + device='cuda:0'), out_proj_covar=tensor([6.6442e-05, 7.0624e-05, 7.5658e-05, 6.3594e-05, 7.1838e-05, 7.2520e-05, + 6.6845e-05, 7.5560e-05], device='cuda:0') +2023-04-26 17:58:21,373 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30986.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:58:23,531 INFO [finetune.py:976] (0/7) Epoch 6, batch 2350, loss[loss=0.2458, simple_loss=0.2852, pruned_loss=0.1032, over 4814.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.2686, pruned_loss=0.07151, over 955564.32 frames. ], batch size: 40, lr: 3.91e-03, grad_scale: 32.0 +2023-04-26 17:58:28,678 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=30995.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:58:31,875 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8602, 1.7756, 2.0281, 2.3749, 2.3851, 1.9094, 1.6029, 2.0714], + device='cuda:0'), covar=tensor([0.0983, 0.1121, 0.0715, 0.0700, 0.0636, 0.0977, 0.0895, 0.0671], + device='cuda:0'), in_proj_covar=tensor([0.0202, 0.0207, 0.0182, 0.0178, 0.0180, 0.0195, 0.0166, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 17:58:32,776 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.3193, 4.1467, 3.0152, 5.0019, 4.2908, 4.3307, 1.7799, 4.3298], + device='cuda:0'), covar=tensor([0.1539, 0.1168, 0.3459, 0.1071, 0.2921, 0.1787, 0.5768, 0.2133], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0221, 0.0255, 0.0314, 0.0303, 0.0255, 0.0276, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0003, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 17:58:39,465 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31011.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 17:58:39,492 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31011.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 17:58:42,538 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31016.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:58:49,762 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.679e+02 2.129e+02 2.680e+02 4.388e+02, threshold=4.258e+02, percent-clipped=1.0 +2023-04-26 17:58:51,152 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3917, 1.6434, 1.6481, 1.8089, 1.7072, 1.8847, 1.7350, 1.7256], + device='cuda:0'), covar=tensor([0.6375, 0.9425, 0.8142, 0.6947, 0.8412, 1.1560, 0.9041, 0.8332], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0395, 0.0317, 0.0328, 0.0346, 0.0412, 0.0376, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 17:58:56,918 INFO [finetune.py:976] (0/7) Epoch 6, batch 2400, loss[loss=0.2149, simple_loss=0.2506, pruned_loss=0.08956, over 
3765.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2656, pruned_loss=0.07061, over 955749.75 frames. ], batch size: 15, lr: 3.91e-03, grad_scale: 32.0 +2023-04-26 17:59:14,455 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1446, 1.5526, 1.9702, 2.0608, 1.9320, 1.4863, 1.0478, 1.5692], + device='cuda:0'), covar=tensor([0.3691, 0.4204, 0.1889, 0.3060, 0.3419, 0.3311, 0.5332, 0.3341], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0258, 0.0221, 0.0330, 0.0218, 0.0231, 0.0243, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 17:59:14,967 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=31064.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 17:59:16,252 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0149, 2.8153, 1.8941, 1.9887, 1.5281, 1.4942, 2.1973, 1.3783], + device='cuda:0'), covar=tensor([0.2270, 0.1790, 0.2016, 0.2378, 0.2971, 0.2417, 0.1430, 0.2571], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0221, 0.0177, 0.0207, 0.0212, 0.0187, 0.0169, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 17:59:19,934 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31072.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 17:59:25,514 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7150, 1.7659, 0.7574, 1.3904, 1.9126, 1.5836, 1.4402, 1.4960], + device='cuda:0'), covar=tensor([0.0554, 0.0401, 0.0414, 0.0606, 0.0280, 0.0543, 0.0544, 0.0626], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0027, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0031], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0049, 0.0037, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-26 17:59:28,012 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8332, 2.4886, 1.8282, 1.7278, 1.4089, 1.4277, 1.9769, 1.3276], + device='cuda:0'), covar=tensor([0.1878, 0.1497, 0.1726, 0.2089, 0.2643, 0.2097, 0.1187, 0.2278], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0220, 0.0177, 0.0207, 0.0212, 0.0186, 0.0169, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 17:59:30,874 INFO [finetune.py:976] (0/7) Epoch 6, batch 2450, loss[loss=0.1928, simple_loss=0.2595, pruned_loss=0.06301, over 4903.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2624, pruned_loss=0.06946, over 954472.71 frames. ], batch size: 43, lr: 3.91e-03, grad_scale: 32.0 +2023-04-26 17:59:57,657 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.091e+02 1.941e+02 2.240e+02 2.729e+02 4.178e+02, threshold=4.480e+02, percent-clipped=0.0 +2023-04-26 18:00:04,429 INFO [finetune.py:976] (0/7) Epoch 6, batch 2500, loss[loss=0.2101, simple_loss=0.2756, pruned_loss=0.07233, over 4848.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2634, pruned_loss=0.06998, over 956148.65 frames. ], batch size: 49, lr: 3.91e-03, grad_scale: 32.0 +2023-04-26 18:00:31,768 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.78 vs. limit=5.0 +2023-04-26 18:00:37,627 INFO [finetune.py:976] (0/7) Epoch 6, batch 2550, loss[loss=0.2409, simple_loss=0.3057, pruned_loss=0.08809, over 4757.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.268, pruned_loss=0.0722, over 954765.51 frames. 
], batch size: 54, lr: 3.91e-03, grad_scale: 32.0 +2023-04-26 18:00:43,078 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7108, 3.6046, 2.8721, 4.3128, 3.6589, 3.7214, 1.6965, 3.6780], + device='cuda:0'), covar=tensor([0.1594, 0.1248, 0.3310, 0.1316, 0.3101, 0.1819, 0.5384, 0.2447], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0219, 0.0253, 0.0312, 0.0300, 0.0254, 0.0273, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:00:50,226 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5757, 2.1755, 1.6270, 1.4802, 1.2470, 1.2670, 1.6519, 1.1614], + device='cuda:0'), covar=tensor([0.1768, 0.1547, 0.1651, 0.2058, 0.2590, 0.2043, 0.1225, 0.2239], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0220, 0.0177, 0.0207, 0.0212, 0.0186, 0.0168, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:01:09,724 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.313e+02 1.772e+02 2.143e+02 2.617e+02 5.206e+02, threshold=4.285e+02, percent-clipped=3.0 +2023-04-26 18:01:22,015 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0291, 1.3627, 1.3717, 1.7727, 1.4695, 1.7428, 1.3063, 3.0881], + device='cuda:0'), covar=tensor([0.0701, 0.0776, 0.0791, 0.1140, 0.0676, 0.0502, 0.0784, 0.0197], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 18:01:22,512 INFO [finetune.py:976] (0/7) Epoch 6, batch 2600, loss[loss=0.2351, simple_loss=0.2915, pruned_loss=0.08939, over 4778.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.2698, pruned_loss=0.07245, over 956166.19 frames. ], batch size: 29, lr: 3.91e-03, grad_scale: 32.0 +2023-04-26 18:01:23,242 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.3362, 1.3815, 1.4272, 0.9510, 1.3035, 1.2476, 1.7189, 1.3786], + device='cuda:0'), covar=tensor([0.3681, 0.1716, 0.5129, 0.2732, 0.1603, 0.2090, 0.1481, 0.4434], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0354, 0.0439, 0.0368, 0.0392, 0.0384, 0.0389, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:01:51,846 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31259.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:02:10,469 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6966, 2.1522, 1.8071, 2.0844, 1.5149, 1.8873, 1.8328, 1.4618], + device='cuda:0'), covar=tensor([0.2262, 0.1637, 0.0986, 0.1539, 0.3504, 0.1290, 0.2101, 0.2838], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0323, 0.0233, 0.0294, 0.0314, 0.0275, 0.0262, 0.0286], + device='cuda:0'), out_proj_covar=tensor([1.2237e-04, 1.3112e-04, 9.4516e-05, 1.1816e-04, 1.2936e-04, 1.1140e-04, + 1.0770e-04, 1.1530e-04], device='cuda:0') +2023-04-26 18:02:14,131 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31281.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:02:24,319 INFO [finetune.py:976] (0/7) Epoch 6, batch 2650, loss[loss=0.2168, simple_loss=0.2753, pruned_loss=0.07918, over 4197.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.273, pruned_loss=0.07416, over 954680.11 frames. 
], batch size: 65, lr: 3.91e-03, grad_scale: 32.0 +2023-04-26 18:02:24,976 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31290.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:02:55,880 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31311.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:03:18,344 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.837e+02 2.164e+02 2.605e+02 4.419e+02, threshold=4.327e+02, percent-clipped=1.0 +2023-04-26 18:03:30,926 INFO [finetune.py:976] (0/7) Epoch 6, batch 2700, loss[loss=0.2226, simple_loss=0.2815, pruned_loss=0.08186, over 4921.00 frames. ], tot_loss[loss=0.2094, simple_loss=0.2719, pruned_loss=0.07343, over 954689.17 frames. ], batch size: 38, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:04:01,822 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=31359.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:04:14,062 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31367.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 18:04:23,813 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2475, 1.4904, 1.2502, 1.4697, 1.3478, 1.2371, 1.3128, 1.1221], + device='cuda:0'), covar=tensor([0.1861, 0.1446, 0.1148, 0.1371, 0.3426, 0.1485, 0.1806, 0.2286], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0324, 0.0234, 0.0295, 0.0316, 0.0277, 0.0263, 0.0288], + device='cuda:0'), out_proj_covar=tensor([1.2307e-04, 1.3179e-04, 9.5085e-05, 1.1827e-04, 1.2997e-04, 1.1188e-04, + 1.0802e-04, 1.1573e-04], device='cuda:0') +2023-04-26 18:04:45,000 INFO [finetune.py:976] (0/7) Epoch 6, batch 2750, loss[loss=0.1692, simple_loss=0.229, pruned_loss=0.05466, over 4790.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2688, pruned_loss=0.07241, over 952213.16 frames. ], batch size: 25, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:05:10,265 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31409.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:05:28,792 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.757e+02 2.143e+02 2.485e+02 4.889e+02, threshold=4.286e+02, percent-clipped=1.0 +2023-04-26 18:05:35,955 INFO [finetune.py:976] (0/7) Epoch 6, batch 2800, loss[loss=0.1817, simple_loss=0.2471, pruned_loss=0.05812, over 4920.00 frames. ], tot_loss[loss=0.2046, simple_loss=0.266, pruned_loss=0.07158, over 950204.95 frames. ], batch size: 36, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:05:57,397 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31470.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:06:09,972 INFO [finetune.py:976] (0/7) Epoch 6, batch 2850, loss[loss=0.2802, simple_loss=0.3238, pruned_loss=0.1184, over 4103.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2662, pruned_loss=0.07207, over 952395.97 frames. 
], batch size: 65, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:06:10,098 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4298, 2.3624, 1.8975, 2.1440, 2.3949, 1.9736, 3.2168, 1.7277], + device='cuda:0'), covar=tensor([0.4302, 0.2315, 0.5940, 0.3682, 0.2273, 0.3022, 0.1884, 0.4880], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0354, 0.0437, 0.0367, 0.0392, 0.0384, 0.0387, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:06:11,327 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31491.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 18:06:36,454 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.825e+02 2.131e+02 2.409e+02 4.261e+02, threshold=4.261e+02, percent-clipped=0.0 +2023-04-26 18:06:40,369 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-26 18:06:43,783 INFO [finetune.py:976] (0/7) Epoch 6, batch 2900, loss[loss=0.2863, simple_loss=0.3461, pruned_loss=0.1133, over 4761.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2684, pruned_loss=0.07257, over 951105.75 frames. ], batch size: 54, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:06:47,527 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6361, 1.3385, 1.7891, 1.7380, 1.4163, 1.2229, 1.3960, 0.9675], + device='cuda:0'), covar=tensor([0.0590, 0.0980, 0.0596, 0.0855, 0.0900, 0.1419, 0.0722, 0.1011], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0075, 0.0074, 0.0068, 0.0078, 0.0095, 0.0081, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:06:51,759 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31552.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 18:06:56,001 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31559.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:07:06,435 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7561, 2.4944, 1.8478, 1.6818, 1.3138, 1.3562, 1.9340, 1.2970], + device='cuda:0'), covar=tensor([0.2075, 0.1685, 0.1876, 0.2234, 0.2921, 0.2471, 0.1308, 0.2407], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0220, 0.0176, 0.0205, 0.0211, 0.0186, 0.0168, 0.0192], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:07:11,678 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31581.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:07:16,954 INFO [finetune.py:976] (0/7) Epoch 6, batch 2950, loss[loss=0.2353, simple_loss=0.2992, pruned_loss=0.08574, over 4803.00 frames. ], tot_loss[loss=0.2083, simple_loss=0.2709, pruned_loss=0.07289, over 953304.88 frames. 
], batch size: 41, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:07:17,670 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31590.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:07:28,067 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=31607.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:07:42,550 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.797e+02 2.161e+02 2.728e+02 5.785e+02, threshold=4.321e+02, percent-clipped=2.0 +2023-04-26 18:07:43,215 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=31629.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:07:49,131 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=31638.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:07:49,694 INFO [finetune.py:976] (0/7) Epoch 6, batch 3000, loss[loss=0.1827, simple_loss=0.2395, pruned_loss=0.06294, over 4359.00 frames. ], tot_loss[loss=0.2112, simple_loss=0.2735, pruned_loss=0.07449, over 952436.89 frames. ], batch size: 19, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:07:49,695 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 18:07:54,111 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5036, 1.2861, 1.6850, 1.5922, 1.3693, 1.1954, 1.3290, 0.9387], + device='cuda:0'), covar=tensor([0.0603, 0.0945, 0.0571, 0.0833, 0.1024, 0.1329, 0.0810, 0.0936], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0075, 0.0074, 0.0067, 0.0078, 0.0095, 0.0081, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:08:00,215 INFO [finetune.py:1010] (0/7) Epoch 6, validation: loss=0.1565, simple_loss=0.2301, pruned_loss=0.04144, over 2265189.00 frames. +2023-04-26 18:08:00,215 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-26 18:08:17,854 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=31667.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 18:08:48,666 INFO [finetune.py:976] (0/7) Epoch 6, batch 3050, loss[loss=0.2041, simple_loss=0.2748, pruned_loss=0.06667, over 4860.00 frames. ], tot_loss[loss=0.2107, simple_loss=0.2735, pruned_loss=0.07393, over 953917.52 frames. 
], batch size: 31, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:09:09,598 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6594, 2.2720, 1.7471, 1.4285, 1.2077, 1.2761, 1.8232, 1.2131], + device='cuda:0'), covar=tensor([0.2066, 0.1760, 0.1909, 0.2390, 0.3060, 0.2472, 0.1327, 0.2530], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0220, 0.0176, 0.0206, 0.0211, 0.0187, 0.0168, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:09:10,790 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31704.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:09:20,300 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1057, 1.4305, 1.9086, 2.2202, 1.8299, 1.3978, 1.1288, 1.5899], + device='cuda:0'), covar=tensor([0.3605, 0.4235, 0.1997, 0.2935, 0.3549, 0.3341, 0.5289, 0.3037], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0256, 0.0220, 0.0327, 0.0216, 0.0230, 0.0242, 0.0192], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:09:23,337 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=31715.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 18:09:40,854 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.692e+02 2.096e+02 2.514e+02 4.783e+02, threshold=4.192e+02, percent-clipped=1.0 +2023-04-26 18:09:53,205 INFO [finetune.py:976] (0/7) Epoch 6, batch 3100, loss[loss=0.1849, simple_loss=0.2432, pruned_loss=0.06332, over 4826.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2706, pruned_loss=0.07272, over 955564.32 frames. ], batch size: 30, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:10:25,839 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:10:25,886 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31765.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:10:56,963 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31788.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:10:57,966 INFO [finetune.py:976] (0/7) Epoch 6, batch 3150, loss[loss=0.1858, simple_loss=0.2508, pruned_loss=0.06046, over 4797.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2667, pruned_loss=0.07139, over 954718.41 frames. ], batch size: 51, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:11:42,875 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31823.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:11:51,413 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.949e+01 1.610e+02 1.947e+02 2.415e+02 6.715e+02, threshold=3.894e+02, percent-clipped=1.0 +2023-04-26 18:12:02,481 INFO [finetune.py:976] (0/7) Epoch 6, batch 3200, loss[loss=0.1723, simple_loss=0.2276, pruned_loss=0.05855, over 4793.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2632, pruned_loss=0.07008, over 955205.50 frames. 
], batch size: 29, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:12:14,967 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=31847.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 18:12:16,735 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31849.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:12:57,058 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31878.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:13:00,707 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31884.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:13:09,142 INFO [finetune.py:976] (0/7) Epoch 6, batch 3250, loss[loss=0.2503, simple_loss=0.3025, pruned_loss=0.09901, over 4868.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2637, pruned_loss=0.07012, over 955760.24 frames. ], batch size: 34, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:13:39,522 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31915.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:13:57,726 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.762e+02 2.242e+02 2.827e+02 6.116e+02, threshold=4.483e+02, percent-clipped=8.0 +2023-04-26 18:14:10,187 INFO [finetune.py:976] (0/7) Epoch 6, batch 3300, loss[loss=0.1619, simple_loss=0.2324, pruned_loss=0.0457, over 4692.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2684, pruned_loss=0.07197, over 956634.52 frames. ], batch size: 23, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:14:10,326 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31939.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:14:18,827 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=31950.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:14:33,032 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.69 vs. limit=2.0 +2023-04-26 18:14:35,915 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=31976.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:14:43,707 INFO [finetune.py:976] (0/7) Epoch 6, batch 3350, loss[loss=0.2068, simple_loss=0.2877, pruned_loss=0.06289, over 4922.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2699, pruned_loss=0.07181, over 957362.86 frames. 
], batch size: 42, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:14:51,556 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-32000.pt +2023-04-26 18:15:00,851 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=32011.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:15:12,029 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.734e+02 2.033e+02 2.508e+02 4.432e+02, threshold=4.066e+02, percent-clipped=0.0 +2023-04-26 18:15:19,664 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7116, 1.1007, 1.6465, 2.0353, 1.7988, 1.5972, 1.6242, 1.6989], + device='cuda:0'), covar=tensor([1.0663, 1.3311, 1.4850, 1.8618, 1.2596, 1.7297, 1.7256, 1.4555], + device='cuda:0'), in_proj_covar=tensor([0.0415, 0.0443, 0.0527, 0.0549, 0.0444, 0.0464, 0.0478, 0.0477], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:15:24,408 INFO [finetune.py:976] (0/7) Epoch 6, batch 3400, loss[loss=0.1895, simple_loss=0.2627, pruned_loss=0.05812, over 4887.00 frames. ], tot_loss[loss=0.2087, simple_loss=0.2715, pruned_loss=0.07292, over 954544.41 frames. ], batch size: 43, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:15:55,453 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32060.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:16:04,735 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32065.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:16:06,537 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7825, 2.4572, 1.8291, 1.5823, 1.2924, 1.3227, 1.9437, 1.2290], + device='cuda:0'), covar=tensor([0.2037, 0.1630, 0.1848, 0.2344, 0.2858, 0.2303, 0.1296, 0.2491], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0220, 0.0176, 0.0206, 0.0211, 0.0187, 0.0168, 0.0193], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:16:10,599 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6179, 1.0307, 4.9698, 4.6414, 4.3763, 4.7427, 4.5080, 4.3439], + device='cuda:0'), covar=tensor([0.7319, 0.6294, 0.0918, 0.1706, 0.0938, 0.1050, 0.1182, 0.1452], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0304, 0.0412, 0.0415, 0.0350, 0.0407, 0.0314, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:16:20,371 INFO [finetune.py:976] (0/7) Epoch 6, batch 3450, loss[loss=0.2304, simple_loss=0.2997, pruned_loss=0.08057, over 4896.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2702, pruned_loss=0.07157, over 955569.49 frames. ], batch size: 37, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:16:29,719 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. 
limit=2.0 +2023-04-26 18:16:37,409 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=32113.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:16:47,433 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.699e+02 2.063e+02 2.455e+02 6.391e+02, threshold=4.126e+02, percent-clipped=3.0 +2023-04-26 18:16:47,571 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9921, 2.0500, 1.8898, 1.6060, 2.2210, 1.7918, 2.7173, 1.6659], + device='cuda:0'), covar=tensor([0.4701, 0.2112, 0.5347, 0.3788, 0.2062, 0.2931, 0.1781, 0.4703], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0355, 0.0435, 0.0366, 0.0392, 0.0383, 0.0388, 0.0416], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:16:54,107 INFO [finetune.py:976] (0/7) Epoch 6, batch 3500, loss[loss=0.2123, simple_loss=0.2789, pruned_loss=0.0729, over 4830.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2669, pruned_loss=0.07005, over 954941.73 frames. ], batch size: 38, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:16:57,308 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32144.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:16:59,132 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32147.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 18:17:21,482 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32179.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:17:33,407 INFO [finetune.py:976] (0/7) Epoch 6, batch 3550, loss[loss=0.1892, simple_loss=0.2365, pruned_loss=0.07093, over 4301.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2648, pruned_loss=0.07001, over 955291.26 frames. ], batch size: 65, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:17:43,424 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=32195.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 18:18:29,258 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.147e+02 1.692e+02 2.038e+02 2.540e+02 1.815e+03, threshold=4.076e+02, percent-clipped=3.0 +2023-04-26 18:18:38,660 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0708, 0.9688, 1.2582, 1.1597, 1.0423, 0.8915, 1.0227, 0.5962], + device='cuda:0'), covar=tensor([0.0617, 0.0740, 0.0550, 0.0664, 0.0768, 0.1253, 0.0488, 0.0919], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0074, 0.0073, 0.0067, 0.0078, 0.0094, 0.0081, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:18:39,230 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32234.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:18:47,842 INFO [finetune.py:976] (0/7) Epoch 6, batch 3600, loss[loss=0.1501, simple_loss=0.2207, pruned_loss=0.03972, over 4766.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.261, pruned_loss=0.06807, over 954248.63 frames. ], batch size: 28, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:18:51,171 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-26 18:19:09,883 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. 
limit=2.0 +2023-04-26 18:19:25,128 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32271.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:19:55,460 INFO [finetune.py:976] (0/7) Epoch 6, batch 3650, loss[loss=0.2129, simple_loss=0.2747, pruned_loss=0.07548, over 4926.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2625, pruned_loss=0.06895, over 954128.70 frames. ], batch size: 38, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:20:05,673 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3149, 1.1162, 1.5775, 1.5002, 1.2180, 1.0013, 1.2417, 0.8612], + device='cuda:0'), covar=tensor([0.0727, 0.0932, 0.0545, 0.0762, 0.0972, 0.1399, 0.0677, 0.0845], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0075, 0.0073, 0.0067, 0.0078, 0.0095, 0.0080, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:20:16,830 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=32306.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:20:43,397 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.808e+02 2.117e+02 2.519e+02 3.732e+02, threshold=4.235e+02, percent-clipped=0.0 +2023-04-26 18:21:03,307 INFO [finetune.py:976] (0/7) Epoch 6, batch 3700, loss[loss=0.215, simple_loss=0.2809, pruned_loss=0.0745, over 4910.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2675, pruned_loss=0.07105, over 952649.04 frames. ], batch size: 36, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:21:26,795 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32360.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:22:04,003 INFO [finetune.py:976] (0/7) Epoch 6, batch 3750, loss[loss=0.187, simple_loss=0.2664, pruned_loss=0.05381, over 4899.00 frames. ], tot_loss[loss=0.2067, simple_loss=0.2696, pruned_loss=0.07194, over 954644.56 frames. ], batch size: 36, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:22:13,512 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4664, 2.5896, 2.2781, 2.3132, 2.8047, 2.2981, 3.5776, 1.9441], + device='cuda:0'), covar=tensor([0.4591, 0.2245, 0.4922, 0.3627, 0.2012, 0.2949, 0.1715, 0.4658], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0358, 0.0437, 0.0367, 0.0395, 0.0385, 0.0389, 0.0418], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:22:21,627 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=32408.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:22:35,221 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.756e+02 2.047e+02 2.329e+02 4.527e+02, threshold=4.095e+02, percent-clipped=1.0 +2023-04-26 18:22:43,808 INFO [finetune.py:976] (0/7) Epoch 6, batch 3800, loss[loss=0.1863, simple_loss=0.2645, pruned_loss=0.05408, over 4861.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2699, pruned_loss=0.07125, over 956110.27 frames. 
], batch size: 31, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:22:53,217 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32444.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:23:21,553 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32479.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:23:33,784 INFO [finetune.py:976] (0/7) Epoch 6, batch 3850, loss[loss=0.1965, simple_loss=0.2524, pruned_loss=0.07024, over 4824.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2684, pruned_loss=0.07109, over 954633.62 frames. ], batch size: 33, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:23:41,607 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=32492.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:24:18,832 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=32527.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:24:19,366 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.697e+02 2.014e+02 2.514e+02 4.116e+02, threshold=4.029e+02, percent-clipped=1.0 +2023-04-26 18:24:24,617 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32534.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:24:28,026 INFO [finetune.py:976] (0/7) Epoch 6, batch 3900, loss[loss=0.1394, simple_loss=0.2176, pruned_loss=0.03064, over 4784.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2663, pruned_loss=0.07057, over 955988.34 frames. ], batch size: 26, lr: 3.90e-03, grad_scale: 32.0 +2023-04-26 18:25:04,559 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32571.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:25:12,960 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-26 18:25:22,254 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=32582.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:25:33,446 INFO [finetune.py:976] (0/7) Epoch 6, batch 3950, loss[loss=0.162, simple_loss=0.2327, pruned_loss=0.04562, over 4780.00 frames. ], tot_loss[loss=0.2015, simple_loss=0.2635, pruned_loss=0.06975, over 954973.04 frames. 
], batch size: 27, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:25:47,964 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7204, 2.0719, 1.2297, 1.4264, 2.0850, 1.7181, 1.6120, 1.6401], + device='cuda:0'), covar=tensor([0.0523, 0.0378, 0.0341, 0.0571, 0.0255, 0.0537, 0.0544, 0.0605], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0031], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 18:25:50,959 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=32606.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:25:51,028 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6161, 1.1982, 1.6982, 2.0029, 1.7786, 1.5815, 1.6163, 1.6459], + device='cuda:0'), covar=tensor([0.8227, 1.1250, 1.1692, 1.1844, 0.9261, 1.3954, 1.3674, 1.2217], + device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0439, 0.0526, 0.0545, 0.0443, 0.0463, 0.0475, 0.0474], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:25:58,782 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=32619.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:26:10,336 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.669e+02 1.908e+02 2.385e+02 4.552e+02, threshold=3.816e+02, percent-clipped=3.0 +2023-04-26 18:26:22,978 INFO [finetune.py:976] (0/7) Epoch 6, batch 4000, loss[loss=0.2098, simple_loss=0.2783, pruned_loss=0.07063, over 4905.00 frames. ], tot_loss[loss=0.2018, simple_loss=0.2638, pruned_loss=0.06995, over 954583.84 frames. ], batch size: 35, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:26:51,942 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=32654.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:27:19,403 INFO [finetune.py:976] (0/7) Epoch 6, batch 4050, loss[loss=0.2636, simple_loss=0.3198, pruned_loss=0.1037, over 4823.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2683, pruned_loss=0.07215, over 954462.24 frames. ], batch size: 40, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:27:25,827 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8141, 2.8092, 2.2757, 3.2461, 2.7734, 2.7415, 1.1172, 2.8071], + device='cuda:0'), covar=tensor([0.2240, 0.1805, 0.3745, 0.3183, 0.3341, 0.2241, 0.5693, 0.2822], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0221, 0.0256, 0.0313, 0.0303, 0.0255, 0.0275, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:27:46,364 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.872e+02 2.266e+02 2.687e+02 3.966e+02, threshold=4.532e+02, percent-clipped=2.0 +2023-04-26 18:27:52,927 INFO [finetune.py:976] (0/7) Epoch 6, batch 4100, loss[loss=0.2025, simple_loss=0.2602, pruned_loss=0.07241, over 4906.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2723, pruned_loss=0.07427, over 954606.91 frames. ], batch size: 43, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:28:26,746 INFO [finetune.py:976] (0/7) Epoch 6, batch 4150, loss[loss=0.1645, simple_loss=0.2335, pruned_loss=0.04776, over 4790.00 frames. ], tot_loss[loss=0.2117, simple_loss=0.2736, pruned_loss=0.07495, over 954291.87 frames. 
], batch size: 29, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:29:05,515 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.362e+02 1.802e+02 2.275e+02 2.602e+02 5.008e+02, threshold=4.549e+02, percent-clipped=2.0 +2023-04-26 18:29:12,126 INFO [finetune.py:976] (0/7) Epoch 6, batch 4200, loss[loss=0.2447, simple_loss=0.3013, pruned_loss=0.09404, over 4883.00 frames. ], tot_loss[loss=0.2101, simple_loss=0.2727, pruned_loss=0.0738, over 954234.63 frames. ], batch size: 32, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:29:17,578 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6733, 1.4959, 0.5556, 1.3634, 1.5554, 1.5343, 1.4288, 1.4537], + device='cuda:0'), covar=tensor([0.0552, 0.0404, 0.0471, 0.0584, 0.0310, 0.0565, 0.0556, 0.0602], + device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0031], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0049, 0.0037, 0.0048, 0.0047, 0.0050], + device='cuda:0') +2023-04-26 18:29:45,429 INFO [finetune.py:976] (0/7) Epoch 6, batch 4250, loss[loss=0.2028, simple_loss=0.2588, pruned_loss=0.07335, over 4797.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2697, pruned_loss=0.07259, over 954812.24 frames. ], batch size: 51, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:30:13,035 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.159e+01 1.639e+02 1.929e+02 2.275e+02 6.095e+02, threshold=3.858e+02, percent-clipped=1.0 +2023-04-26 18:30:19,128 INFO [finetune.py:976] (0/7) Epoch 6, batch 4300, loss[loss=0.1658, simple_loss=0.2476, pruned_loss=0.04203, over 4823.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2668, pruned_loss=0.07151, over 955239.36 frames. ], batch size: 39, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:30:47,941 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=32966.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:31:19,600 INFO [finetune.py:976] (0/7) Epoch 6, batch 4350, loss[loss=0.1861, simple_loss=0.2539, pruned_loss=0.05914, over 4738.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2637, pruned_loss=0.07013, over 953381.49 frames. ], batch size: 54, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:31:32,770 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1554, 1.5354, 1.3567, 1.8992, 1.6314, 1.7954, 1.4388, 3.1369], + device='cuda:0'), covar=tensor([0.0727, 0.0785, 0.0857, 0.1147, 0.0636, 0.0540, 0.0775, 0.0245], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0040, 0.0042, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 18:32:12,330 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33027.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:32:13,431 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.910e+01 1.721e+02 1.970e+02 2.465e+02 4.001e+02, threshold=3.941e+02, percent-clipped=3.0 +2023-04-26 18:32:24,806 INFO [finetune.py:976] (0/7) Epoch 6, batch 4400, loss[loss=0.2252, simple_loss=0.2974, pruned_loss=0.07652, over 4911.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2646, pruned_loss=0.07079, over 952278.83 frames. 
], batch size: 43, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:33:06,901 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33069.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:33:20,951 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4302, 1.6956, 1.6755, 2.3067, 1.8082, 2.2578, 1.7794, 4.6805], + device='cuda:0'), covar=tensor([0.0654, 0.0789, 0.0902, 0.1170, 0.0695, 0.0520, 0.0751, 0.0121], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 18:33:29,654 INFO [finetune.py:976] (0/7) Epoch 6, batch 4450, loss[loss=0.2687, simple_loss=0.33, pruned_loss=0.1038, over 4809.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.268, pruned_loss=0.07182, over 952270.55 frames. ], batch size: 41, lr: 3.90e-03, grad_scale: 16.0 +2023-04-26 18:33:43,444 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0135, 1.4595, 1.2821, 1.7882, 1.5451, 1.8151, 1.4085, 3.1069], + device='cuda:0'), covar=tensor([0.0703, 0.0767, 0.0853, 0.1154, 0.0668, 0.0487, 0.0723, 0.0206], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0045, 0.0041, 0.0040, 0.0040, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 18:33:54,491 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0308, 2.6904, 2.0028, 2.0609, 1.5452, 1.5538, 2.0824, 1.5251], + device='cuda:0'), covar=tensor([0.1625, 0.1630, 0.1615, 0.2008, 0.2470, 0.1996, 0.1120, 0.2092], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0218, 0.0174, 0.0204, 0.0209, 0.0185, 0.0166, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 18:34:08,076 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4490, 3.5006, 0.8453, 1.7869, 1.9530, 2.4257, 1.9844, 1.0047], + device='cuda:0'), covar=tensor([0.1453, 0.0815, 0.2106, 0.1466, 0.1114, 0.1062, 0.1640, 0.1979], + device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0259, 0.0144, 0.0127, 0.0138, 0.0159, 0.0123, 0.0126], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:34:12,570 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.742e+02 2.062e+02 2.633e+02 5.031e+02, threshold=4.124e+02, percent-clipped=5.0 +2023-04-26 18:34:13,333 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33130.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:34:18,685 INFO [finetune.py:976] (0/7) Epoch 6, batch 4500, loss[loss=0.1963, simple_loss=0.2599, pruned_loss=0.0664, over 4826.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2702, pruned_loss=0.07251, over 952569.82 frames. ], batch size: 39, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:34:23,815 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-26 18:34:29,009 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33155.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:34:48,975 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. 
limit=2.0 +2023-04-26 18:34:52,377 INFO [finetune.py:976] (0/7) Epoch 6, batch 4550, loss[loss=0.2229, simple_loss=0.2851, pruned_loss=0.08032, over 4912.00 frames. ], tot_loss[loss=0.2095, simple_loss=0.272, pruned_loss=0.07343, over 951905.64 frames. ], batch size: 37, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:34:54,329 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9704, 2.4973, 2.0753, 2.3395, 1.7928, 2.0812, 2.0481, 1.7620], + device='cuda:0'), covar=tensor([0.2611, 0.1521, 0.0953, 0.1404, 0.3290, 0.1355, 0.2365, 0.2905], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0323, 0.0234, 0.0297, 0.0318, 0.0276, 0.0264, 0.0288], + device='cuda:0'), out_proj_covar=tensor([1.2322e-04, 1.3087e-04, 9.4978e-05, 1.1919e-04, 1.3076e-04, 1.1165e-04, + 1.0853e-04, 1.1593e-04], device='cuda:0') +2023-04-26 18:34:56,767 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8028, 2.2371, 1.8629, 2.1199, 1.6133, 1.8450, 1.8260, 1.5633], + device='cuda:0'), covar=tensor([0.2106, 0.1466, 0.0911, 0.1170, 0.3183, 0.1258, 0.2044, 0.2491], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0323, 0.0234, 0.0297, 0.0318, 0.0276, 0.0264, 0.0288], + device='cuda:0'), out_proj_covar=tensor([1.2321e-04, 1.3086e-04, 9.4997e-05, 1.1919e-04, 1.3079e-04, 1.1165e-04, + 1.0854e-04, 1.1593e-04], device='cuda:0') +2023-04-26 18:35:09,507 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33216.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:35:18,794 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.680e+02 1.969e+02 2.447e+02 4.378e+02, threshold=3.937e+02, percent-clipped=2.0 +2023-04-26 18:35:25,829 INFO [finetune.py:976] (0/7) Epoch 6, batch 4600, loss[loss=0.2092, simple_loss=0.2703, pruned_loss=0.07402, over 4809.00 frames. ], tot_loss[loss=0.209, simple_loss=0.2718, pruned_loss=0.07307, over 952629.96 frames. ], batch size: 40, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:35:52,917 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6032, 1.2470, 1.6986, 1.9506, 1.7262, 1.5825, 1.6393, 1.6311], + device='cuda:0'), covar=tensor([0.8102, 1.1386, 1.0968, 1.1655, 0.9384, 1.2227, 1.3355, 1.1134], + device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0440, 0.0526, 0.0546, 0.0444, 0.0462, 0.0475, 0.0474], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:35:59,786 INFO [finetune.py:976] (0/7) Epoch 6, batch 4650, loss[loss=0.2663, simple_loss=0.3114, pruned_loss=0.1106, over 4828.00 frames. ], tot_loss[loss=0.2065, simple_loss=0.2687, pruned_loss=0.07218, over 952371.54 frames. 
], batch size: 49, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:36:20,285 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33322.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:36:22,724 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1612, 1.4653, 1.4869, 1.5920, 1.5274, 1.6936, 1.6435, 1.5791], + device='cuda:0'), covar=tensor([0.6800, 0.9075, 0.7746, 0.7415, 0.8300, 1.2105, 0.8867, 0.8191], + device='cuda:0'), in_proj_covar=tensor([0.0318, 0.0392, 0.0318, 0.0327, 0.0343, 0.0409, 0.0373, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:36:25,472 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.668e+02 2.007e+02 2.366e+02 4.187e+02, threshold=4.014e+02, percent-clipped=2.0 +2023-04-26 18:36:32,593 INFO [finetune.py:976] (0/7) Epoch 6, batch 4700, loss[loss=0.1754, simple_loss=0.2324, pruned_loss=0.05926, over 4722.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2653, pruned_loss=0.07042, over 952682.80 frames. ], batch size: 59, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:37:20,897 INFO [finetune.py:976] (0/7) Epoch 6, batch 4750, loss[loss=0.1491, simple_loss=0.2128, pruned_loss=0.04272, over 4896.00 frames. ], tot_loss[loss=0.2016, simple_loss=0.2636, pruned_loss=0.06974, over 954793.76 frames. ], batch size: 35, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:37:42,268 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.5986, 4.5692, 3.1333, 5.3483, 4.7230, 4.5939, 2.1827, 4.5965], + device='cuda:0'), covar=tensor([0.1602, 0.1085, 0.3115, 0.1094, 0.4825, 0.1805, 0.5627, 0.2019], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0219, 0.0253, 0.0311, 0.0303, 0.0255, 0.0275, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:38:06,053 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33425.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:38:14,210 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.777e+02 2.013e+02 2.553e+02 6.681e+02, threshold=4.026e+02, percent-clipped=2.0 +2023-04-26 18:38:27,280 INFO [finetune.py:976] (0/7) Epoch 6, batch 4800, loss[loss=0.2804, simple_loss=0.3385, pruned_loss=0.1112, over 4899.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2658, pruned_loss=0.07135, over 954618.97 frames. ], batch size: 35, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:39:01,126 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33472.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:39:08,372 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-26 18:39:12,306 INFO [finetune.py:976] (0/7) Epoch 6, batch 4850, loss[loss=0.1821, simple_loss=0.2409, pruned_loss=0.06168, over 4179.00 frames. ], tot_loss[loss=0.2063, simple_loss=0.2687, pruned_loss=0.07198, over 953760.89 frames. ], batch size: 18, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:39:14,871 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-26 18:39:26,718 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. 
limit=2.0 +2023-04-26 18:39:27,149 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33511.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:39:38,470 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.796e+02 2.126e+02 2.490e+02 5.374e+02, threshold=4.252e+02, percent-clipped=2.0 +2023-04-26 18:39:41,039 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33533.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:39:44,984 INFO [finetune.py:976] (0/7) Epoch 6, batch 4900, loss[loss=0.1921, simple_loss=0.2595, pruned_loss=0.06236, over 4925.00 frames. ], tot_loss[loss=0.2077, simple_loss=0.2703, pruned_loss=0.07253, over 955046.50 frames. ], batch size: 29, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:39:48,663 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3994, 2.5650, 2.2027, 2.2945, 2.7651, 2.2612, 3.6412, 1.8923], + device='cuda:0'), covar=tensor([0.4511, 0.2394, 0.5319, 0.4038, 0.2338, 0.3028, 0.1679, 0.4914], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0355, 0.0438, 0.0365, 0.0394, 0.0385, 0.0386, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:40:07,282 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-26 18:40:10,256 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6603, 1.3302, 1.7778, 2.0775, 1.7615, 1.6265, 1.6682, 1.7062], + device='cuda:0'), covar=tensor([0.9099, 1.2506, 1.2312, 1.3514, 1.0847, 1.4754, 1.5154, 1.2601], + device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0440, 0.0524, 0.0545, 0.0442, 0.0462, 0.0474, 0.0473], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:40:18,323 INFO [finetune.py:976] (0/7) Epoch 6, batch 4950, loss[loss=0.2107, simple_loss=0.2777, pruned_loss=0.07184, over 4769.00 frames. ], tot_loss[loss=0.2088, simple_loss=0.2719, pruned_loss=0.07284, over 955244.07 frames. ], batch size: 28, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:40:40,489 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33622.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:40:44,643 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.256e+02 1.697e+02 1.943e+02 2.276e+02 3.620e+02, threshold=3.887e+02, percent-clipped=0.0 +2023-04-26 18:40:51,672 INFO [finetune.py:976] (0/7) Epoch 6, batch 5000, loss[loss=0.2237, simple_loss=0.2774, pruned_loss=0.08497, over 4892.00 frames. ], tot_loss[loss=0.2066, simple_loss=0.2697, pruned_loss=0.07169, over 956730.20 frames. 
], batch size: 35, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:41:19,710 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8904, 1.8465, 2.1101, 2.2725, 1.7682, 1.4161, 2.0592, 1.0982], + device='cuda:0'), covar=tensor([0.1085, 0.0890, 0.0749, 0.0924, 0.0936, 0.1463, 0.0790, 0.1200], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0076, 0.0074, 0.0068, 0.0079, 0.0096, 0.0081, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:41:24,488 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=33670.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:41:28,779 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33677.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:41:30,045 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.92 vs. limit=5.0 +2023-04-26 18:41:36,417 INFO [finetune.py:976] (0/7) Epoch 6, batch 5050, loss[loss=0.1638, simple_loss=0.2308, pruned_loss=0.04837, over 4844.00 frames. ], tot_loss[loss=0.2042, simple_loss=0.267, pruned_loss=0.07074, over 957653.08 frames. ], batch size: 47, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:42:01,172 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33725.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:42:03,508 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.184e+02 1.719e+02 2.034e+02 2.333e+02 5.936e+02, threshold=4.068e+02, percent-clipped=3.0 +2023-04-26 18:42:06,113 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0 +2023-04-26 18:42:14,938 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=33738.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 18:42:15,447 INFO [finetune.py:976] (0/7) Epoch 6, batch 5100, loss[loss=0.2457, simple_loss=0.298, pruned_loss=0.09664, over 4831.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.264, pruned_loss=0.07034, over 957010.64 frames. ], batch size: 51, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:43:00,671 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=33773.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:43:09,924 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8149, 2.4986, 1.8512, 1.7009, 1.3731, 1.4209, 1.9765, 1.2726], + device='cuda:0'), covar=tensor([0.2033, 0.1508, 0.1660, 0.2184, 0.2866, 0.2251, 0.1257, 0.2407], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0220, 0.0176, 0.0208, 0.0212, 0.0187, 0.0167, 0.0192], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:43:21,823 INFO [finetune.py:976] (0/7) Epoch 6, batch 5150, loss[loss=0.2289, simple_loss=0.3001, pruned_loss=0.07881, over 4851.00 frames. ], tot_loss[loss=0.2012, simple_loss=0.2628, pruned_loss=0.06978, over 955895.36 frames. 
], batch size: 49, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:43:42,909 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=33811.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:43:54,707 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=33828.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:43:55,206 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.912e+02 2.137e+02 2.570e+02 6.415e+02, threshold=4.274e+02, percent-clipped=3.0 +2023-04-26 18:44:04,829 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3033, 1.1990, 1.5653, 1.4984, 1.2025, 1.1096, 1.2713, 0.7847], + device='cuda:0'), covar=tensor([0.0700, 0.0811, 0.0585, 0.0844, 0.1018, 0.1342, 0.0705, 0.0942], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0075, 0.0073, 0.0068, 0.0079, 0.0096, 0.0081, 0.0077], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:44:07,686 INFO [finetune.py:976] (0/7) Epoch 6, batch 5200, loss[loss=0.2187, simple_loss=0.2829, pruned_loss=0.07726, over 4850.00 frames. ], tot_loss[loss=0.203, simple_loss=0.266, pruned_loss=0.07006, over 956808.46 frames. ], batch size: 49, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:44:38,043 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=33859.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:44:49,950 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9527, 1.0105, 3.1273, 2.6688, 2.8244, 2.9607, 3.0239, 2.6410], + device='cuda:0'), covar=tensor([1.0081, 0.8160, 0.2792, 0.4403, 0.3104, 0.4467, 0.3237, 0.3708], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0308, 0.0413, 0.0417, 0.0353, 0.0408, 0.0316, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:45:14,782 INFO [finetune.py:976] (0/7) Epoch 6, batch 5250, loss[loss=0.2052, simple_loss=0.2718, pruned_loss=0.06927, over 4821.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2688, pruned_loss=0.07078, over 958112.55 frames. ], batch size: 33, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:45:25,457 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.45 vs. limit=5.0 +2023-04-26 18:45:58,333 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.714e+02 2.064e+02 2.470e+02 4.995e+02, threshold=4.129e+02, percent-clipped=2.0 +2023-04-26 18:46:04,411 INFO [finetune.py:976] (0/7) Epoch 6, batch 5300, loss[loss=0.1726, simple_loss=0.2476, pruned_loss=0.04882, over 4782.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.2693, pruned_loss=0.07076, over 957404.93 frames. 
], batch size: 26, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:46:04,528 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=33939.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:46:21,704 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.2565, 4.1639, 2.9936, 4.8019, 4.1892, 4.2231, 2.2616, 4.2144], + device='cuda:0'), covar=tensor([0.1672, 0.1068, 0.3154, 0.1327, 0.2618, 0.1905, 0.5272, 0.2343], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0219, 0.0253, 0.0310, 0.0302, 0.0254, 0.0274, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:46:38,099 INFO [finetune.py:976] (0/7) Epoch 6, batch 5350, loss[loss=0.2017, simple_loss=0.2609, pruned_loss=0.07124, over 4826.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2701, pruned_loss=0.0711, over 957776.46 frames. ], batch size: 30, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:46:45,363 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-34000.pt +2023-04-26 18:46:46,587 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34000.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:47:06,984 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.964e+01 1.645e+02 1.895e+02 2.308e+02 4.781e+02, threshold=3.789e+02, percent-clipped=2.0 +2023-04-26 18:47:09,496 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34033.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 18:47:13,056 INFO [finetune.py:976] (0/7) Epoch 6, batch 5400, loss[loss=0.1909, simple_loss=0.2543, pruned_loss=0.06373, over 4796.00 frames. ], tot_loss[loss=0.2041, simple_loss=0.2671, pruned_loss=0.07051, over 956928.94 frames. ], batch size: 51, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:47:16,178 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34044.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:47:46,753 INFO [finetune.py:976] (0/7) Epoch 6, batch 5450, loss[loss=0.1791, simple_loss=0.2386, pruned_loss=0.05975, over 4822.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2647, pruned_loss=0.07002, over 956659.72 frames. ], batch size: 41, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:47:57,052 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34105.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:48:11,960 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34128.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:48:12,951 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.622e+02 2.010e+02 2.382e+02 4.007e+02, threshold=4.020e+02, percent-clipped=1.0 +2023-04-26 18:48:25,399 INFO [finetune.py:976] (0/7) Epoch 6, batch 5500, loss[loss=0.2028, simple_loss=0.2569, pruned_loss=0.07431, over 4931.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2615, pruned_loss=0.06883, over 957369.72 frames. 
], batch size: 38, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:48:29,179 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34145.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:48:58,805 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9186, 3.6793, 2.7605, 4.4916, 3.8734, 3.9010, 1.8191, 3.7752], + device='cuda:0'), covar=tensor([0.1330, 0.1173, 0.3435, 0.1197, 0.2953, 0.1584, 0.5161, 0.2135], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0218, 0.0251, 0.0308, 0.0300, 0.0252, 0.0272, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:49:10,832 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=34176.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:49:30,902 INFO [finetune.py:976] (0/7) Epoch 6, batch 5550, loss[loss=0.1893, simple_loss=0.2546, pruned_loss=0.06199, over 4768.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2642, pruned_loss=0.07029, over 955334.06 frames. ], batch size: 28, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:49:52,162 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34206.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:50:24,792 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.890e+02 2.190e+02 2.677e+02 6.143e+02, threshold=4.380e+02, percent-clipped=3.0 +2023-04-26 18:50:36,984 INFO [finetune.py:976] (0/7) Epoch 6, batch 5600, loss[loss=0.2119, simple_loss=0.2829, pruned_loss=0.07044, over 4863.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2674, pruned_loss=0.07103, over 952748.05 frames. ], batch size: 31, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:51:12,248 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-26 18:51:28,269 INFO [finetune.py:976] (0/7) Epoch 6, batch 5650, loss[loss=0.2157, simple_loss=0.2858, pruned_loss=0.07283, over 4797.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2707, pruned_loss=0.07181, over 952792.76 frames. ], batch size: 51, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:51:31,890 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34295.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:51:32,580 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2973, 1.7463, 2.2441, 2.7236, 2.1242, 1.6566, 1.2525, 2.0375], + device='cuda:0'), covar=tensor([0.4121, 0.4427, 0.1824, 0.2962, 0.3589, 0.3472, 0.5444, 0.3045], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0254, 0.0218, 0.0323, 0.0214, 0.0229, 0.0239, 0.0190], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:52:02,999 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.702e+02 2.093e+02 2.545e+02 5.557e+02, threshold=4.186e+02, percent-clipped=1.0 +2023-04-26 18:52:05,468 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34333.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:52:09,023 INFO [finetune.py:976] (0/7) Epoch 6, batch 5700, loss[loss=0.1115, simple_loss=0.1678, pruned_loss=0.02757, over 4136.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.265, pruned_loss=0.07088, over 932032.95 frames. 
], batch size: 18, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:52:22,279 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-26 18:52:25,222 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-6.pt +2023-04-26 18:52:39,656 INFO [finetune.py:976] (0/7) Epoch 7, batch 0, loss[loss=0.158, simple_loss=0.2348, pruned_loss=0.04067, over 4718.00 frames. ], tot_loss[loss=0.158, simple_loss=0.2348, pruned_loss=0.04067, over 4718.00 frames. ], batch size: 27, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:52:39,657 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 18:52:50,220 INFO [finetune.py:1010] (0/7) Epoch 7, validation: loss=0.1579, simple_loss=0.2317, pruned_loss=0.04207, over 2265189.00 frames. +2023-04-26 18:52:50,221 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-26 18:52:59,337 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=34381.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:53:01,216 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6758, 1.9422, 1.8407, 2.0490, 1.7880, 2.0062, 1.9927, 1.9435], + device='cuda:0'), covar=tensor([0.6504, 1.0425, 0.8072, 0.7243, 0.8681, 1.1988, 1.0329, 0.9251], + device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0392, 0.0317, 0.0327, 0.0343, 0.0409, 0.0372, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:53:11,518 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34400.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:53:23,171 INFO [finetune.py:976] (0/7) Epoch 7, batch 50, loss[loss=0.1963, simple_loss=0.2565, pruned_loss=0.06806, over 4855.00 frames. ], tot_loss[loss=0.2102, simple_loss=0.2738, pruned_loss=0.07328, over 216822.23 frames. ], batch size: 31, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:53:32,046 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.800e+02 2.094e+02 2.566e+02 4.468e+02, threshold=4.189e+02, percent-clipped=1.0 +2023-04-26 18:53:56,443 INFO [finetune.py:976] (0/7) Epoch 7, batch 100, loss[loss=0.2292, simple_loss=0.2732, pruned_loss=0.0926, over 4750.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2641, pruned_loss=0.06888, over 380695.91 frames. ], batch size: 54, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:54:19,346 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34501.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:54:29,756 INFO [finetune.py:976] (0/7) Epoch 7, batch 150, loss[loss=0.1663, simple_loss=0.245, pruned_loss=0.04383, over 4931.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2595, pruned_loss=0.06787, over 509408.87 frames. ], batch size: 33, lr: 3.89e-03, grad_scale: 16.0 +2023-04-26 18:54:39,165 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.717e+02 2.069e+02 2.417e+02 7.374e+02, threshold=4.138e+02, percent-clipped=4.0 +2023-04-26 18:55:03,720 INFO [finetune.py:976] (0/7) Epoch 7, batch 200, loss[loss=0.2207, simple_loss=0.2643, pruned_loss=0.0886, over 4891.00 frames. ], tot_loss[loss=0.198, simple_loss=0.259, pruned_loss=0.06849, over 609236.22 frames. 
], batch size: 35, lr: 3.89e-03, grad_scale: 32.0 +2023-04-26 18:55:27,244 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34593.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:55:33,681 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34595.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:55:43,916 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8604, 1.3619, 1.7211, 1.9597, 1.7193, 1.2813, 1.0003, 1.4420], + device='cuda:0'), covar=tensor([0.3794, 0.4119, 0.1909, 0.2797, 0.3241, 0.3206, 0.4990, 0.2940], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0254, 0.0217, 0.0322, 0.0214, 0.0228, 0.0238, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 18:56:04,710 INFO [finetune.py:976] (0/7) Epoch 7, batch 250, loss[loss=0.206, simple_loss=0.2763, pruned_loss=0.06786, over 4932.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2638, pruned_loss=0.07016, over 684540.86 frames. ], batch size: 33, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 18:56:04,811 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1600, 1.5595, 1.2966, 1.7434, 1.5450, 2.0849, 1.2960, 3.6829], + device='cuda:0'), covar=tensor([0.0689, 0.0825, 0.0903, 0.1317, 0.0710, 0.0518, 0.0826, 0.0150], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0044, 0.0040, 0.0040, 0.0039, 0.0060], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 18:56:09,047 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34622.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:56:17,341 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34626.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:56:19,026 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.883e+02 2.183e+02 2.769e+02 4.729e+02, threshold=4.366e+02, percent-clipped=3.0 +2023-04-26 18:56:30,104 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0550, 0.9577, 1.2270, 1.1537, 1.0196, 0.9003, 0.9517, 0.4810], + device='cuda:0'), covar=tensor([0.0708, 0.0881, 0.0640, 0.0761, 0.0845, 0.1540, 0.0580, 0.1083], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0075, 0.0073, 0.0067, 0.0078, 0.0095, 0.0080, 0.0076], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 18:56:39,247 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=34643.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:56:48,848 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.2601, 3.2800, 2.4301, 3.8101, 3.3253, 3.3145, 1.5199, 3.1517], + device='cuda:0'), covar=tensor([0.2158, 0.1401, 0.3598, 0.2474, 0.3168, 0.1981, 0.5846, 0.3055], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0219, 0.0254, 0.0310, 0.0302, 0.0253, 0.0274, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:56:50,787 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3438, 2.3089, 1.9151, 2.1475, 2.4976, 1.8958, 3.2200, 1.7377], + device='cuda:0'), covar=tensor([0.4812, 0.2492, 0.5310, 0.3822, 0.2688, 0.3118, 0.2155, 0.4621], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0355, 0.0438, 0.0365, 
0.0394, 0.0385, 0.0387, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 18:56:51,379 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34654.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:57:11,720 INFO [finetune.py:976] (0/7) Epoch 7, batch 300, loss[loss=0.1888, simple_loss=0.2671, pruned_loss=0.05522, over 4836.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2681, pruned_loss=0.07062, over 745188.31 frames. ], batch size: 47, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 18:57:35,508 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34683.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 18:57:37,960 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34687.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:57:51,402 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34700.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:58:03,708 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=34710.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 18:58:12,744 INFO [finetune.py:976] (0/7) Epoch 7, batch 350, loss[loss=0.2461, simple_loss=0.2936, pruned_loss=0.0993, over 4751.00 frames. ], tot_loss[loss=0.207, simple_loss=0.2705, pruned_loss=0.07174, over 790223.40 frames. ], batch size: 27, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 18:58:28,031 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.776e+02 2.211e+02 2.674e+02 4.769e+02, threshold=4.422e+02, percent-clipped=1.0 +2023-04-26 18:58:50,330 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=34748.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:59:02,246 INFO [finetune.py:976] (0/7) Epoch 7, batch 400, loss[loss=0.188, simple_loss=0.2476, pruned_loss=0.06419, over 4798.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2708, pruned_loss=0.07139, over 828497.39 frames. ], batch size: 25, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 18:59:05,942 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=34771.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 18:59:14,096 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2263, 1.3754, 1.5193, 1.6997, 1.6060, 1.7656, 1.6804, 1.6168], + device='cuda:0'), covar=tensor([0.5761, 0.8363, 0.7264, 0.6843, 0.8406, 1.1709, 0.8575, 0.7341], + device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0392, 0.0317, 0.0328, 0.0343, 0.0409, 0.0373, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 18:59:20,684 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1129, 1.6682, 2.0749, 2.4192, 2.0271, 1.5527, 1.3235, 1.7891], + device='cuda:0'), covar=tensor([0.4580, 0.4288, 0.1978, 0.3162, 0.3374, 0.3339, 0.5282, 0.3021], + device='cuda:0'), in_proj_covar=tensor([0.0277, 0.0254, 0.0217, 0.0323, 0.0213, 0.0228, 0.0238, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 18:59:26,227 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=34801.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 18:59:26,399 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. 
limit=2.0 +2023-04-26 18:59:36,186 INFO [finetune.py:976] (0/7) Epoch 7, batch 450, loss[loss=0.2137, simple_loss=0.2756, pruned_loss=0.07588, over 4831.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2688, pruned_loss=0.0704, over 857357.16 frames. ], batch size: 39, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 18:59:45,537 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.735e+02 2.065e+02 2.383e+02 5.194e+02, threshold=4.130e+02, percent-clipped=2.0 +2023-04-26 18:59:58,661 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=34849.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:00:09,433 INFO [finetune.py:976] (0/7) Epoch 7, batch 500, loss[loss=0.1845, simple_loss=0.2455, pruned_loss=0.0617, over 4913.00 frames. ], tot_loss[loss=0.204, simple_loss=0.267, pruned_loss=0.0705, over 878643.16 frames. ], batch size: 37, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 19:00:42,229 INFO [finetune.py:976] (0/7) Epoch 7, batch 550, loss[loss=0.1898, simple_loss=0.2569, pruned_loss=0.06132, over 4818.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2643, pruned_loss=0.06986, over 895028.83 frames. ], batch size: 30, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 19:00:51,085 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.669e+02 2.030e+02 2.418e+02 4.759e+02, threshold=4.059e+02, percent-clipped=2.0 +2023-04-26 19:01:01,275 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4759, 1.8983, 2.2774, 2.9266, 2.3370, 1.7488, 1.6577, 2.2076], + device='cuda:0'), covar=tensor([0.4128, 0.4182, 0.1871, 0.3206, 0.3505, 0.3286, 0.5156, 0.3211], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0256, 0.0219, 0.0325, 0.0215, 0.0230, 0.0240, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 19:01:10,913 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34949.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:01:25,773 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9053, 2.5350, 2.0092, 2.3878, 1.7030, 1.9124, 2.1260, 1.7256], + device='cuda:0'), covar=tensor([0.2290, 0.1237, 0.0983, 0.1221, 0.2909, 0.1291, 0.1840, 0.2603], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0324, 0.0237, 0.0297, 0.0316, 0.0277, 0.0264, 0.0287], + device='cuda:0'), out_proj_covar=tensor([1.2409e-04, 1.3127e-04, 9.6040e-05, 1.1915e-04, 1.3022e-04, 1.1181e-04, + 1.0830e-04, 1.1557e-04], device='cuda:0') +2023-04-26 19:01:32,182 INFO [finetune.py:976] (0/7) Epoch 7, batch 600, loss[loss=0.2386, simple_loss=0.3056, pruned_loss=0.08578, over 4871.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2651, pruned_loss=0.07044, over 909623.38 frames. 
], batch size: 44, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 19:01:45,323 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34978.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 19:01:53,767 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=34982.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:01:56,129 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4067, 1.2526, 4.1323, 3.8578, 3.6950, 3.9117, 3.9267, 3.6510], + device='cuda:0'), covar=tensor([0.7242, 0.5972, 0.1096, 0.1817, 0.1153, 0.1831, 0.1466, 0.1628], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0306, 0.0409, 0.0413, 0.0351, 0.0406, 0.0314, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 19:02:27,943 INFO [finetune.py:976] (0/7) Epoch 7, batch 650, loss[loss=0.1847, simple_loss=0.2548, pruned_loss=0.0573, over 4924.00 frames. ], tot_loss[loss=0.2062, simple_loss=0.2689, pruned_loss=0.07169, over 919570.08 frames. ], batch size: 38, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 19:02:30,562 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0733, 1.5458, 1.9342, 2.1151, 1.8920, 1.4734, 1.0025, 1.5131], + device='cuda:0'), covar=tensor([0.3808, 0.4130, 0.1920, 0.2830, 0.3255, 0.3347, 0.5512, 0.3151], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0256, 0.0219, 0.0325, 0.0215, 0.0230, 0.0240, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 19:02:42,342 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.760e+02 2.002e+02 2.405e+02 4.301e+02, threshold=4.004e+02, percent-clipped=1.0 +2023-04-26 19:03:19,051 INFO [finetune.py:976] (0/7) Epoch 7, batch 700, loss[loss=0.2348, simple_loss=0.2729, pruned_loss=0.09835, over 3959.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2704, pruned_loss=0.07169, over 926935.97 frames. ], batch size: 17, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 19:03:19,115 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35066.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 19:03:36,975 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-04-26 19:03:47,929 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35092.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:04:02,379 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8972, 1.5528, 1.9058, 2.1662, 2.2111, 1.7984, 1.4246, 1.9878], + device='cuda:0'), covar=tensor([0.0920, 0.1269, 0.0704, 0.0718, 0.0710, 0.0983, 0.1044, 0.0652], + device='cuda:0'), in_proj_covar=tensor([0.0204, 0.0209, 0.0183, 0.0180, 0.0182, 0.0198, 0.0167, 0.0190], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 19:04:19,900 INFO [finetune.py:976] (0/7) Epoch 7, batch 750, loss[loss=0.194, simple_loss=0.2494, pruned_loss=0.06928, over 4796.00 frames. ], tot_loss[loss=0.2076, simple_loss=0.2713, pruned_loss=0.0719, over 930239.07 frames. 
], batch size: 25, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 19:04:33,726 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.705e+02 2.087e+02 2.408e+02 7.580e+02, threshold=4.175e+02, percent-clipped=5.0 +2023-04-26 19:05:05,746 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35153.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:05:25,181 INFO [finetune.py:976] (0/7) Epoch 7, batch 800, loss[loss=0.193, simple_loss=0.2554, pruned_loss=0.06536, over 4796.00 frames. ], tot_loss[loss=0.2074, simple_loss=0.2713, pruned_loss=0.07172, over 936947.22 frames. ], batch size: 25, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 19:06:20,320 INFO [finetune.py:976] (0/7) Epoch 7, batch 850, loss[loss=0.2187, simple_loss=0.2603, pruned_loss=0.08854, over 4322.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2688, pruned_loss=0.07081, over 941651.78 frames. ], batch size: 18, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 19:06:21,016 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1107, 0.6363, 0.9421, 0.8035, 1.2380, 0.9517, 0.7822, 1.0013], + device='cuda:0'), covar=tensor([0.1516, 0.1493, 0.2141, 0.1505, 0.1003, 0.1320, 0.1615, 0.1885], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0331, 0.0353, 0.0303, 0.0340, 0.0330, 0.0308, 0.0354], + device='cuda:0'), out_proj_covar=tensor([6.6238e-05, 7.0686e-05, 7.6478e-05, 6.3022e-05, 7.1530e-05, 7.1181e-05, + 6.6650e-05, 7.6166e-05], device='cuda:0') +2023-04-26 19:06:32,703 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.732e+02 2.031e+02 2.393e+02 4.995e+02, threshold=4.061e+02, percent-clipped=2.0 +2023-04-26 19:06:58,280 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35249.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:07:20,078 INFO [finetune.py:976] (0/7) Epoch 7, batch 900, loss[loss=0.1403, simple_loss=0.2165, pruned_loss=0.03206, over 4766.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2655, pruned_loss=0.06972, over 943298.40 frames. ], batch size: 28, lr: 3.88e-03, grad_scale: 32.0 +2023-04-26 19:07:38,475 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35278.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 19:07:40,962 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35282.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:08:02,073 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=35297.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:08:14,477 INFO [finetune.py:976] (0/7) Epoch 7, batch 950, loss[loss=0.1794, simple_loss=0.2365, pruned_loss=0.06116, over 4778.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2635, pruned_loss=0.0691, over 945490.48 frames. 
+2023-04-26 19:08:27,226 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=35326.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:08:29,021 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.623e+02 1.972e+02 2.345e+02 3.927e+02, threshold=3.944e+02, percent-clipped=0.0
+2023-04-26 19:08:29,699 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=35330.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:08:40,252 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3814, 1.3010, 4.1131, 3.7658, 3.6361, 3.8251, 3.9124, 3.5547],
+ device='cuda:0'), covar=tensor([0.7713, 0.6146, 0.1143, 0.2193, 0.1363, 0.1758, 0.1433, 0.1778],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0308, 0.0410, 0.0414, 0.0352, 0.0407, 0.0316, 0.0369],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:08:50,410 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35345.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 19:09:02,534 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0
+2023-04-26 19:09:14,040 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8512, 1.3185, 1.6485, 1.9994, 1.5842, 1.2937, 0.9329, 1.4083],
+ device='cuda:0'), covar=tensor([0.4169, 0.4619, 0.2186, 0.3018, 0.3497, 0.3503, 0.5972, 0.2982],
+ device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0254, 0.0218, 0.0324, 0.0214, 0.0229, 0.0239, 0.0190],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-26 19:09:20,444 INFO [finetune.py:976] (0/7) Epoch 7, batch 1000, loss[loss=0.2103, simple_loss=0.2729, pruned_loss=0.07384, over 4940.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2665, pruned_loss=0.07026, over 947996.19 frames. ], batch size: 33, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:09:20,555 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35366.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 19:09:29,962 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35374.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:09:33,073 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6017, 1.2250, 1.7195, 2.0081, 1.7643, 1.5819, 1.6460, 1.7164],
+ device='cuda:0'), covar=tensor([0.7700, 1.0596, 1.0980, 1.1086, 0.8825, 1.2929, 1.2959, 1.1520],
+ device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0436, 0.0520, 0.0541, 0.0440, 0.0460, 0.0472, 0.0471],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:10:06,681 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35406.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 19:10:18,212 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=35414.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 19:10:25,351 INFO [finetune.py:976] (0/7) Epoch 7, batch 1050, loss[loss=0.2107, simple_loss=0.2763, pruned_loss=0.07258, over 4897.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2684, pruned_loss=0.0707, over 950663.95 frames. ], batch size: 35, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:10:39,105 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.810e+02 2.204e+02 2.753e+02 5.526e+02, threshold=4.408e+02, percent-clipped=1.0
+2023-04-26 19:10:48,432 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35435.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:10:50,983 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0
+2023-04-26 19:11:02,522 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35448.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:11:02,649 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-26 19:11:31,120 INFO [finetune.py:976] (0/7) Epoch 7, batch 1100, loss[loss=0.1941, simple_loss=0.2539, pruned_loss=0.06721, over 4800.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2695, pruned_loss=0.07133, over 951463.76 frames. ], batch size: 25, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:11:43,964 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4611, 3.5624, 0.9451, 1.9015, 1.9521, 2.4637, 2.0616, 1.0348],
+ device='cuda:0'), covar=tensor([0.1406, 0.0929, 0.2009, 0.1292, 0.1104, 0.1105, 0.1394, 0.2063],
+ device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0256, 0.0144, 0.0126, 0.0137, 0.0158, 0.0122, 0.0125],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 19:12:08,318 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6713, 2.0965, 1.4497, 1.2323, 1.1939, 1.1874, 1.4879, 1.1210],
+ device='cuda:0'), covar=tensor([0.1775, 0.1445, 0.1885, 0.2102, 0.2654, 0.2228, 0.1334, 0.2270],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0219, 0.0175, 0.0206, 0.0210, 0.0187, 0.0167, 0.0191],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 19:12:09,915 INFO [finetune.py:976] (0/7) Epoch 7, batch 1150, loss[loss=0.2391, simple_loss=0.3031, pruned_loss=0.08754, over 4818.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2688, pruned_loss=0.07087, over 950182.10 frames. ], batch size: 38, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:12:18,334 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.291e+02 1.835e+02 2.101e+02 2.448e+02 6.183e+02, threshold=4.202e+02, percent-clipped=2.0
+2023-04-26 19:12:19,087 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35530.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:12:25,134 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0
+2023-04-26 19:12:26,895 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7017, 1.5537, 0.6146, 1.3483, 1.8118, 1.5533, 1.4364, 1.4501],
+ device='cuda:0'), covar=tensor([0.0533, 0.0420, 0.0439, 0.0603, 0.0285, 0.0592, 0.0550, 0.0631],
+ device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0026, 0.0024, 0.0031, 0.0021, 0.0030, 0.0029, 0.0031],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-26 19:12:43,336 INFO [finetune.py:976] (0/7) Epoch 7, batch 1200, loss[loss=0.228, simple_loss=0.2891, pruned_loss=0.08342, over 4901.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2687, pruned_loss=0.07151, over 951003.83 frames. ], batch size: 36, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:12:53,112 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0
+2023-04-26 19:13:00,091 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35591.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:13:17,330 INFO [finetune.py:976] (0/7) Epoch 7, batch 1250, loss[loss=0.253, simple_loss=0.2966, pruned_loss=0.1047, over 4887.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2654, pruned_loss=0.06987, over 951456.71 frames. ], batch size: 35, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:13:26,237 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.763e+02 2.030e+02 2.428e+02 4.549e+02, threshold=4.060e+02, percent-clipped=1.0
+2023-04-26 19:13:51,306 INFO [finetune.py:976] (0/7) Epoch 7, batch 1300, loss[loss=0.1711, simple_loss=0.2481, pruned_loss=0.04711, over 4898.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2621, pruned_loss=0.06803, over 954282.96 frames. ], batch size: 32, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:13:59,110 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6292, 2.1671, 1.0460, 1.4452, 2.2320, 1.5983, 1.4854, 1.5662],
+ device='cuda:0'), covar=tensor([0.0552, 0.0329, 0.0372, 0.0552, 0.0263, 0.0543, 0.0525, 0.0608],
+ device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0031],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-26 19:14:04,025 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.04 vs. limit=5.0
+2023-04-26 19:14:05,803 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1726, 1.4631, 1.2934, 1.3515, 1.2884, 1.2640, 1.3574, 1.0550],
+ device='cuda:0'), covar=tensor([0.1781, 0.1274, 0.1068, 0.1327, 0.3334, 0.1306, 0.1679, 0.2101],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0325, 0.0237, 0.0300, 0.0318, 0.0280, 0.0267, 0.0289],
+ device='cuda:0'), out_proj_covar=tensor([1.2541e-04, 1.3189e-04, 9.5842e-05, 1.2047e-04, 1.3110e-04, 1.1288e-04,
+ 1.0941e-04, 1.1633e-04], device='cuda:0')
+2023-04-26 19:14:14,077 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35701.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 19:14:35,192 INFO [finetune.py:976] (0/7) Epoch 7, batch 1350, loss[loss=0.1928, simple_loss=0.2639, pruned_loss=0.06088, over 4864.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2616, pruned_loss=0.06758, over 951257.82 frames. ], batch size: 31, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:14:45,270 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-26 19:14:48,107 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0180, 2.7124, 2.3447, 2.5601, 1.8922, 2.2605, 2.4073, 1.9534],
+ device='cuda:0'), covar=tensor([0.2491, 0.1631, 0.1228, 0.1592, 0.3147, 0.1676, 0.2016, 0.2526],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0325, 0.0237, 0.0300, 0.0318, 0.0279, 0.0266, 0.0289],
+ device='cuda:0'), out_proj_covar=tensor([1.2514e-04, 1.3167e-04, 9.5835e-05, 1.2046e-04, 1.3104e-04, 1.1279e-04,
+ 1.0923e-04, 1.1621e-04], device='cuda:0')
+2023-04-26 19:14:54,290 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.639e+02 1.977e+02 2.381e+02 4.004e+02, threshold=3.953e+02, percent-clipped=0.0
+2023-04-26 19:14:54,987 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35730.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:15:17,752 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=35748.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:15:39,328 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-04-26 19:15:40,837 INFO [finetune.py:976] (0/7) Epoch 7, batch 1400, loss[loss=0.2411, simple_loss=0.2959, pruned_loss=0.09317, over 4810.00 frames. ], tot_loss[loss=0.2009, simple_loss=0.2644, pruned_loss=0.06868, over 951072.38 frames. ], batch size: 40, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:15:44,439 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35771.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:16:06,179 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=35796.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:16:07,580 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.22 vs. limit=5.0
+2023-04-26 19:16:26,225 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=35811.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:16:30,134 INFO [finetune.py:976] (0/7) Epoch 7, batch 1450, loss[loss=0.2065, simple_loss=0.2788, pruned_loss=0.06717, over 4827.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2676, pruned_loss=0.06955, over 951686.40 frames. ], batch size: 33, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:16:50,309 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.278e+02 1.770e+02 2.190e+02 2.562e+02 4.335e+02, threshold=4.381e+02, percent-clipped=3.0
+2023-04-26 19:16:57,847 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4145, 1.5008, 1.2378, 0.8984, 1.1013, 1.0672, 1.2584, 1.0179],
+ device='cuda:0'), covar=tensor([0.1615, 0.1421, 0.1437, 0.1882, 0.2206, 0.1787, 0.1062, 0.1842],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0218, 0.0174, 0.0205, 0.0209, 0.0187, 0.0166, 0.0190],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-26 19:16:58,465 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35832.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:16:59,079 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.3026, 1.3351, 1.4367, 0.9716, 1.3468, 1.0340, 1.7552, 1.2483],
+ device='cuda:0'), covar=tensor([0.3900, 0.1671, 0.4373, 0.2628, 0.1686, 0.2325, 0.1548, 0.4498],
+ device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0351, 0.0430, 0.0362, 0.0386, 0.0380, 0.0384, 0.0416],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:17:41,155 INFO [finetune.py:976] (0/7) Epoch 7, batch 1500, loss[loss=0.2468, simple_loss=0.3109, pruned_loss=0.09136, over 4797.00 frames. ], tot_loss[loss=0.2045, simple_loss=0.269, pruned_loss=0.07003, over 952875.71 frames. ], batch size: 45, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:17:46,035 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=35872.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:17:55,859 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=35886.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:18:31,248 INFO [finetune.py:976] (0/7) Epoch 7, batch 1550, loss[loss=0.2126, simple_loss=0.2777, pruned_loss=0.07373, over 4748.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2693, pruned_loss=0.07024, over 954925.98 frames. ], batch size: 59, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:18:51,453 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.799e+02 2.113e+02 2.516e+02 6.961e+02, threshold=4.225e+02, percent-clipped=2.0
+2023-04-26 19:19:35,301 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.97 vs. limit=5.0
+2023-04-26 19:19:38,744 INFO [finetune.py:976] (0/7) Epoch 7, batch 1600, loss[loss=0.1734, simple_loss=0.2407, pruned_loss=0.05302, over 4755.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.266, pruned_loss=0.06938, over 954577.44 frames. ], batch size: 27, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:20:23,934 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-36000.pt
+2023-04-26 19:20:30,767 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36001.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 19:20:45,229 INFO [finetune.py:976] (0/7) Epoch 7, batch 1650, loss[loss=0.1747, simple_loss=0.2416, pruned_loss=0.05387, over 4908.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2624, pruned_loss=0.06811, over 953044.63 frames. ], batch size: 32, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:20:59,375 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.09 vs. limit=5.0
+2023-04-26 19:20:59,791 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.778e+02 2.122e+02 2.492e+02 4.114e+02, threshold=4.243e+02, percent-clipped=0.0
+2023-04-26 19:21:00,967 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36030.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:21:26,273 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=36049.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 19:21:48,199 INFO [finetune.py:976] (0/7) Epoch 7, batch 1700, loss[loss=0.2411, simple_loss=0.2926, pruned_loss=0.09476, over 4833.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2615, pruned_loss=0.0686, over 953884.99 frames. ], batch size: 33, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:21:56,084 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=36078.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:22:10,247 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.7674, 1.7531, 1.9348, 1.5106, 1.9836, 1.5850, 2.6065, 1.6138],
+ device='cuda:0'), covar=tensor([0.4097, 0.1893, 0.4663, 0.3077, 0.1785, 0.2565, 0.1557, 0.4617],
+ device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0354, 0.0436, 0.0364, 0.0392, 0.0385, 0.0388, 0.0421],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:22:21,090 INFO [finetune.py:976] (0/7) Epoch 7, batch 1750, loss[loss=0.1982, simple_loss=0.2496, pruned_loss=0.07339, over 4722.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2634, pruned_loss=0.0694, over 953400.37 frames. ], batch size: 23, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:22:23,530 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-26 19:22:28,794 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36127.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:22:29,945 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.331e+02 1.866e+02 2.133e+02 2.694e+02 4.644e+02, threshold=4.265e+02, percent-clipped=2.0
+2023-04-26 19:22:33,663 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4313, 1.0540, 1.1956, 1.1199, 1.6293, 1.2946, 1.0324, 1.1194],
+ device='cuda:0'), covar=tensor([0.1494, 0.1132, 0.1711, 0.1554, 0.0638, 0.1170, 0.1729, 0.1651],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0330, 0.0354, 0.0304, 0.0340, 0.0330, 0.0309, 0.0355],
+ device='cuda:0'), out_proj_covar=tensor([6.5904e-05, 7.0402e-05, 7.6771e-05, 6.3091e-05, 7.1525e-05, 7.1204e-05,
+ 6.6870e-05, 7.6479e-05], device='cuda:0')
+2023-04-26 19:22:47,373 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8813, 3.2600, 1.1606, 2.0597, 2.2083, 2.3877, 2.1548, 1.2587],
+ device='cuda:0'), covar=tensor([0.1270, 0.1520, 0.1907, 0.1280, 0.0966, 0.1098, 0.1360, 0.1722],
+ device='cuda:0'), in_proj_covar=tensor([0.0121, 0.0257, 0.0145, 0.0126, 0.0137, 0.0158, 0.0122, 0.0125],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 19:22:54,678 INFO [finetune.py:976] (0/7) Epoch 7, batch 1800, loss[loss=0.1923, simple_loss=0.2628, pruned_loss=0.06093, over 4806.00 frames. ], tot_loss[loss=0.2037, simple_loss=0.2671, pruned_loss=0.07015, over 953852.73 frames. ], batch size: 51, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:22:55,366 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36167.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:23:05,456 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36181.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:23:08,480 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36186.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:23:44,253 INFO [finetune.py:976] (0/7) Epoch 7, batch 1850, loss[loss=0.1862, simple_loss=0.2401, pruned_loss=0.06613, over 4193.00 frames. ], tot_loss[loss=0.2047, simple_loss=0.2681, pruned_loss=0.07067, over 955239.59 frames. ], batch size: 65, lr: 3.88e-03, grad_scale: 32.0
+2023-04-26 19:24:03,617 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.720e+02 2.075e+02 2.567e+02 3.756e+02, threshold=4.150e+02, percent-clipped=0.0
+2023-04-26 19:24:07,662 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=36234.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:24:19,047 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36242.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:24:41,746 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6914, 3.5224, 2.8104, 4.1964, 3.6230, 3.6486, 1.4782, 3.5494],
+ device='cuda:0'), covar=tensor([0.1988, 0.1317, 0.2922, 0.2029, 0.2888, 0.1974, 0.5809, 0.2472],
+ device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0219, 0.0253, 0.0313, 0.0304, 0.0254, 0.0275, 0.0274],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 19:24:45,930 INFO [finetune.py:976] (0/7) Epoch 7, batch 1900, loss[loss=0.2158, simple_loss=0.28, pruned_loss=0.07581, over 4888.00 frames. ], tot_loss[loss=0.2048, simple_loss=0.2687, pruned_loss=0.07042, over 954875.62 frames. ], batch size: 35, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:24:47,268 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0799, 2.6399, 1.0927, 1.3724, 2.1329, 1.2242, 3.3604, 1.7966],
+ device='cuda:0'), covar=tensor([0.0641, 0.0708, 0.0871, 0.1264, 0.0447, 0.0992, 0.0282, 0.0615],
+ device='cuda:0'), in_proj_covar=tensor([0.0053, 0.0069, 0.0051, 0.0048, 0.0053, 0.0053, 0.0080, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0007],
+ device='cuda:0')
+2023-04-26 19:25:13,707 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5873, 3.4854, 0.9714, 1.8457, 1.9911, 2.4954, 1.9890, 1.0215],
+ device='cuda:0'), covar=tensor([0.1504, 0.0907, 0.2125, 0.1415, 0.1150, 0.1127, 0.1608, 0.2088],
+ device='cuda:0'), in_proj_covar=tensor([0.0120, 0.0257, 0.0145, 0.0127, 0.0137, 0.0158, 0.0122, 0.0125],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 19:25:19,583 INFO [finetune.py:976] (0/7) Epoch 7, batch 1950, loss[loss=0.1937, simple_loss=0.2526, pruned_loss=0.06736, over 4822.00 frames. ], tot_loss[loss=0.2024, simple_loss=0.2662, pruned_loss=0.06933, over 955061.09 frames. ], batch size: 41, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:25:27,367 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.259e+02 1.677e+02 1.937e+02 2.378e+02 4.833e+02, threshold=3.873e+02, percent-clipped=2.0
+2023-04-26 19:25:58,966 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0
+2023-04-26 19:26:09,641 INFO [finetune.py:976] (0/7) Epoch 7, batch 2000, loss[loss=0.1828, simple_loss=0.2545, pruned_loss=0.05553, over 4771.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.2631, pruned_loss=0.06837, over 955018.91 frames. ], batch size: 54, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:26:49,543 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6310, 1.4032, 1.7884, 1.9164, 1.7293, 1.5805, 1.6646, 1.7352],
+ device='cuda:0'), covar=tensor([1.0078, 1.2797, 1.4198, 1.6811, 1.2170, 1.6103, 1.6717, 1.4048],
+ device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0433, 0.0518, 0.0540, 0.0440, 0.0460, 0.0472, 0.0471],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:27:00,248 INFO [finetune.py:976] (0/7) Epoch 7, batch 2050, loss[loss=0.2317, simple_loss=0.2903, pruned_loss=0.0865, over 4857.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2597, pruned_loss=0.06686, over 954684.33 frames. ], batch size: 44, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:27:07,138 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36427.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:27:08,259 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.074e+02 1.550e+02 1.908e+02 2.415e+02 5.206e+02, threshold=3.816e+02, percent-clipped=3.0
+2023-04-26 19:27:08,345 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5258, 1.4673, 4.1065, 3.6510, 3.7822, 3.8568, 3.8341, 3.5005],
+ device='cuda:0'), covar=tensor([0.8847, 0.7327, 0.1663, 0.2968, 0.1925, 0.3358, 0.2033, 0.3081],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0304, 0.0409, 0.0413, 0.0350, 0.0408, 0.0317, 0.0366],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:27:08,380 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8828, 1.7985, 2.0724, 2.2903, 2.1500, 1.8468, 1.3834, 1.8616],
+ device='cuda:0'), covar=tensor([0.0829, 0.1091, 0.0565, 0.0619, 0.0614, 0.0879, 0.0964, 0.0685],
+ device='cuda:0'), in_proj_covar=tensor([0.0200, 0.0206, 0.0180, 0.0177, 0.0180, 0.0194, 0.0162, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:27:43,883 INFO [finetune.py:976] (0/7) Epoch 7, batch 2100, loss[loss=0.2139, simple_loss=0.2782, pruned_loss=0.07481, over 4823.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2598, pruned_loss=0.06661, over 954349.99 frames. ], batch size: 38, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:27:45,067 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36467.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:27:48,126 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7308, 1.2618, 4.2389, 3.9197, 3.7049, 3.8989, 3.9568, 3.7361],
+ device='cuda:0'), covar=tensor([0.6837, 0.5890, 0.1058, 0.1760, 0.1224, 0.1522, 0.1862, 0.1439],
+ device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0306, 0.0411, 0.0417, 0.0353, 0.0411, 0.0319, 0.0369],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:27:49,899 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=36475.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:28:17,169 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=36515.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:28:17,707 INFO [finetune.py:976] (0/7) Epoch 7, batch 2150, loss[loss=0.2656, simple_loss=0.3267, pruned_loss=0.1023, over 4803.00 frames. ], tot_loss[loss=0.1998, simple_loss=0.2632, pruned_loss=0.06819, over 954957.15 frames. ], batch size: 51, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:28:25,936 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.260e+02 1.822e+02 2.196e+02 2.668e+02 7.231e+02, threshold=4.392e+02, percent-clipped=1.0
+2023-04-26 19:28:30,876 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36537.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:28:51,287 INFO [finetune.py:976] (0/7) Epoch 7, batch 2200, loss[loss=0.1953, simple_loss=0.2678, pruned_loss=0.0614, over 4827.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2652, pruned_loss=0.06908, over 952802.38 frames. ], batch size: 40, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:29:27,978 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36605.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:29:30,321 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36608.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:29:38,207 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36612.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:29:41,092 INFO [finetune.py:976] (0/7) Epoch 7, batch 2250, loss[loss=0.1599, simple_loss=0.233, pruned_loss=0.04343, over 4768.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2685, pruned_loss=0.07065, over 955034.28 frames. ], batch size: 26, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:30:00,404 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.228e+02 1.830e+02 2.114e+02 2.598e+02 4.357e+02, threshold=4.229e+02, percent-clipped=0.0
+2023-04-26 19:30:23,591 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6432, 1.7398, 1.6047, 1.3603, 1.8368, 1.4196, 2.3194, 1.4011],
+ device='cuda:0'), covar=tensor([0.4044, 0.1684, 0.5538, 0.3105, 0.1849, 0.2604, 0.1411, 0.5044],
+ device='cuda:0'), in_proj_covar=tensor([0.0351, 0.0356, 0.0441, 0.0368, 0.0394, 0.0389, 0.0391, 0.0425],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:30:32,306 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0
+2023-04-26 19:30:40,443 INFO [finetune.py:976] (0/7) Epoch 7, batch 2300, loss[loss=0.1468, simple_loss=0.2268, pruned_loss=0.03345, over 4814.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2693, pruned_loss=0.07026, over 955217.70 frames. ], batch size: 40, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:30:40,570 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36666.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:30:42,379 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36669.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:30:45,355 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36673.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:31:02,254 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36700.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 19:31:23,675 INFO [finetune.py:976] (0/7) Epoch 7, batch 2350, loss[loss=0.2226, simple_loss=0.2738, pruned_loss=0.08569, over 4816.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2687, pruned_loss=0.07075, over 956723.05 frames. ], batch size: 40, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:31:24,391 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1335, 1.4790, 1.3804, 1.7441, 1.5476, 1.7769, 1.3420, 3.0322],
+ device='cuda:0'), covar=tensor([0.0690, 0.0777, 0.0799, 0.1186, 0.0619, 0.0491, 0.0757, 0.0167],
+ device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0045, 0.0041, 0.0040, 0.0039, 0.0061],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0017],
+ device='cuda:0')
+2023-04-26 19:31:38,221 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.607e+01 1.814e+02 2.081e+02 2.499e+02 5.543e+02, threshold=4.163e+02, percent-clipped=2.0
+2023-04-26 19:31:38,354 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8630, 1.9940, 1.7468, 1.6096, 2.0930, 1.6775, 2.4854, 1.5293],
+ device='cuda:0'), covar=tensor([0.3408, 0.1464, 0.4414, 0.2558, 0.1460, 0.2037, 0.1435, 0.4317],
+ device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0356, 0.0438, 0.0367, 0.0392, 0.0387, 0.0389, 0.0423],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:31:45,464 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7415, 1.8407, 1.0341, 1.4814, 1.9315, 1.6650, 1.5385, 1.6110],
+ device='cuda:0'), covar=tensor([0.0514, 0.0389, 0.0410, 0.0594, 0.0292, 0.0575, 0.0576, 0.0628],
+ device='cuda:0'), in_proj_covar=tensor([0.0031, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-26 19:31:47,276 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1520, 2.1492, 2.6307, 2.8008, 2.5323, 2.0867, 1.7482, 2.2084],
+ device='cuda:0'), covar=tensor([0.1062, 0.1095, 0.0573, 0.0680, 0.0849, 0.1140, 0.1046, 0.0729],
+ device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0204, 0.0180, 0.0176, 0.0179, 0.0192, 0.0161, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:31:47,320 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7839, 1.6422, 1.9863, 2.2364, 1.8895, 1.6970, 1.7858, 1.9124],
+ device='cuda:0'), covar=tensor([0.8765, 1.2244, 1.2426, 1.1397,
+ 1.0435, 1.6195, 1.5667, 1.2476],
+ device='cuda:0'), in_proj_covar=tensor([0.0415, 0.0436, 0.0520, 0.0543, 0.0444, 0.0463, 0.0474, 0.0473],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:32:09,210 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.78 vs. limit=5.0
+2023-04-26 19:32:21,158 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36761.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 19:32:30,354 INFO [finetune.py:976] (0/7) Epoch 7, batch 2400, loss[loss=0.1555, simple_loss=0.2313, pruned_loss=0.03986, over 4867.00 frames. ], tot_loss[loss=0.2044, simple_loss=0.2673, pruned_loss=0.07072, over 956579.40 frames. ], batch size: 34, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:32:56,382 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36805.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 19:33:03,827 INFO [finetune.py:976] (0/7) Epoch 7, batch 2450, loss[loss=0.1742, simple_loss=0.2397, pruned_loss=0.05431, over 4764.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2647, pruned_loss=0.0701, over 956000.59 frames. ], batch size: 28, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:33:12,660 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.003e+02 1.628e+02 1.856e+02 2.205e+02 3.828e+02, threshold=3.712e+02, percent-clipped=0.0
+2023-04-26 19:33:18,547 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=36837.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:33:37,047 INFO [finetune.py:976] (0/7) Epoch 7, batch 2500, loss[loss=0.2382, simple_loss=0.2963, pruned_loss=0.09009, over 4811.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2663, pruned_loss=0.0712, over 952762.38 frames. ], batch size: 40, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:33:37,171 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36866.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 19:33:51,061 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=36885.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:34:07,373 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36911.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:34:10,362 INFO [finetune.py:976] (0/7) Epoch 7, batch 2550, loss[loss=0.2349, simple_loss=0.296, pruned_loss=0.08693, over 4895.00 frames. ], tot_loss[loss=0.2073, simple_loss=0.27, pruned_loss=0.07231, over 953453.87 frames. ], batch size: 35, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:34:20,132 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.245e+02 1.780e+02 2.108e+02 2.520e+02 3.900e+02, threshold=4.217e+02, percent-clipped=2.0
+2023-04-26 19:34:45,075 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=36958.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:34:52,520 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36961.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:34:54,323 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36964.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:34:55,486 INFO [finetune.py:976] (0/7) Epoch 7, batch 2600, loss[loss=0.2361, simple_loss=0.2869, pruned_loss=0.09271, over 4818.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2697, pruned_loss=0.07119, over 954995.64 frames. ], batch size: 25, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:35:00,956 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=36968.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:35:04,477 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=36972.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:35:35,320 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-04-26 19:35:48,147 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37004.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:35:54,606 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0
+2023-04-26 19:36:01,129 INFO [finetune.py:976] (0/7) Epoch 7, batch 2650, loss[loss=0.2158, simple_loss=0.2858, pruned_loss=0.0729, over 4859.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2708, pruned_loss=0.07177, over 955072.48 frames. ], batch size: 44, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:36:03,046 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37019.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:36:04,881 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9429, 1.3919, 1.7133, 2.3547, 2.3729, 1.9098, 1.5265, 1.9253],
+ device='cuda:0'), covar=tensor([0.1039, 0.1732, 0.0970, 0.0683, 0.0666, 0.1148, 0.1180, 0.0830],
+ device='cuda:0'), in_proj_covar=tensor([0.0203, 0.0209, 0.0183, 0.0180, 0.0182, 0.0196, 0.0165, 0.0191],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:36:09,955 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.731e+02 2.035e+02 2.586e+02 4.493e+02, threshold=4.069e+02, percent-clipped=1.0
+2023-04-26 19:36:22,265 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5576, 1.3278, 4.2623, 3.9876, 3.7033, 3.9872, 3.8418, 3.7164],
+ device='cuda:0'), covar=tensor([0.7420, 0.5986, 0.0915, 0.1540, 0.1053, 0.1719, 0.1961, 0.1493],
+ device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0306, 0.0410, 0.0415, 0.0351, 0.0409, 0.0319, 0.0368],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:36:28,803 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37056.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 19:36:34,303 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37065.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:36:34,794 INFO [finetune.py:976] (0/7) Epoch 7, batch 2700, loss[loss=0.1814, simple_loss=0.2492, pruned_loss=0.0568, over 4840.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2688, pruned_loss=0.07048, over 955815.40 frames. ], batch size: 47, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:37:19,212 INFO [finetune.py:976] (0/7) Epoch 7, batch 2750, loss[loss=0.1829, simple_loss=0.2561, pruned_loss=0.05484, over 4822.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2655, pruned_loss=0.06951, over 955844.06 frames. ], batch size: 40, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:37:32,279 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.627e+02 1.918e+02 2.292e+02 3.296e+02, threshold=3.835e+02, percent-clipped=0.0
+2023-04-26 19:37:35,140 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7212, 1.2761, 1.4592, 1.4157, 1.9269, 1.5736, 1.3176, 1.4153],
+ device='cuda:0'), covar=tensor([0.1701, 0.1578, 0.2095, 0.1606, 0.0892, 0.1581, 0.2052, 0.1966],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0329, 0.0353, 0.0304, 0.0341, 0.0329, 0.0308, 0.0353],
+ device='cuda:0'), out_proj_covar=tensor([6.5670e-05, 7.0021e-05, 7.6559e-05, 6.3224e-05, 7.1956e-05, 7.0893e-05,
+ 6.6519e-05, 7.5979e-05], device='cuda:0')
+2023-04-26 19:38:11,730 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37161.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 19:38:20,449 INFO [finetune.py:976] (0/7) Epoch 7, batch 2800, loss[loss=0.1503, simple_loss=0.2162, pruned_loss=0.04214, over 4800.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2616, pruned_loss=0.06775, over 956409.84 frames. ], batch size: 51, lr: 3.87e-03, grad_scale: 64.0
+2023-04-26 19:38:31,052 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8604, 2.4121, 0.7968, 1.1523, 1.3863, 1.0559, 2.5575, 1.2396],
+ device='cuda:0'), covar=tensor([0.0816, 0.0597, 0.0823, 0.1709, 0.0631, 0.1370, 0.0415, 0.0998],
+ device='cuda:0'), in_proj_covar=tensor([0.0053, 0.0068, 0.0051, 0.0048, 0.0053, 0.0053, 0.0080, 0.0052],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008],
+ device='cuda:0')
+2023-04-26 19:39:00,020 INFO [finetune.py:976] (0/7) Epoch 7, batch 2850, loss[loss=0.1852, simple_loss=0.2549, pruned_loss=0.05768, over 4764.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2607, pruned_loss=0.068, over 955926.86 frames. ], batch size: 59, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:39:06,302 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0046, 1.0394, 1.3050, 1.5123, 1.4797, 1.6997, 1.4625, 1.4250],
+ device='cuda:0'), covar=tensor([0.4736, 0.6774, 0.6439, 0.5659, 0.7165, 0.9991, 0.6576, 0.6764],
+ device='cuda:0'), in_proj_covar=tensor([0.0320, 0.0390, 0.0316, 0.0328, 0.0343, 0.0408, 0.0371, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 19:39:06,979 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0
+2023-04-26 19:39:08,542 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.711e+02 1.920e+02 2.384e+02 5.364e+02, threshold=3.840e+02, percent-clipped=2.0
+2023-04-26 19:39:17,466 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-26 19:39:23,816 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3245, 1.7364, 2.2234, 2.8220, 2.3269, 1.7238, 1.5021, 2.1827],
+ device='cuda:0'), covar=tensor([0.3947, 0.3968, 0.1843, 0.3121, 0.3421, 0.3075, 0.4994, 0.2902],
+ device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0253, 0.0217, 0.0322, 0.0215, 0.0229, 0.0237, 0.0189],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-26 19:39:30,817 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37261.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:39:32,617 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37264.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:39:33,738 INFO [finetune.py:976] (0/7) Epoch 7, batch 2900, loss[loss=0.2531, simple_loss=0.3138, pruned_loss=0.09615, over 4894.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2625, pruned_loss=0.06837, over 956029.00 frames. ], batch size: 37, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:39:34,411 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37267.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:39:35,004 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37268.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:39:35,021 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37268.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:40:02,583 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=37309.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:40:10,639 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=37312.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:40:11,877 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37314.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:40:13,063 INFO [finetune.py:976] (0/7) Epoch 7, batch 2950, loss[loss=0.2018, simple_loss=0.2682, pruned_loss=0.06773, over 4830.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2661, pruned_loss=0.06908, over 957935.48 frames. ], batch size: 33, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:40:13,122 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=37316.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:40:27,124 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37329.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:40:33,129 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.889e+02 2.237e+02 2.673e+02 8.962e+02, threshold=4.474e+02, percent-clipped=7.0
+2023-04-26 19:41:05,693 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6596, 1.8538, 0.7628, 1.3096, 1.8493, 1.5584, 1.3913, 1.4407],
+ device='cuda:0'), covar=tensor([0.0545, 0.0358, 0.0381, 0.0562, 0.0277, 0.0523, 0.0538, 0.0607],
+ device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0037, 0.0048, 0.0048, 0.0049],
+ device='cuda:0')
+2023-04-26 19:41:05,696 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37356.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 19:41:08,606 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37360.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:41:17,416 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37364.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:41:18,546 INFO [finetune.py:976] (0/7) Epoch 7, batch 3000, loss[loss=0.2113, simple_loss=0.2628, pruned_loss=0.07994, over 4803.00 frames. ], tot_loss[loss=0.2035, simple_loss=0.2675, pruned_loss=0.0698, over 957634.92 frames. ], batch size: 25, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:41:18,548 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-26 19:41:30,694 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6727, 1.7514, 1.6695, 1.3713, 1.9143, 1.5182, 2.3166, 1.5120],
+ device='cuda:0'), covar=tensor([0.4111, 0.1849, 0.6539, 0.3329, 0.1752, 0.2625, 0.1753, 0.5143],
+ device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0356, 0.0438, 0.0367, 0.0392, 0.0387, 0.0387, 0.0423],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:41:39,843 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6711, 1.0754, 1.5817, 2.0574, 1.8185, 1.5872, 1.5944, 1.6803],
+ device='cuda:0'), covar=tensor([0.7917, 1.0555, 1.0444, 1.1509, 0.9900, 1.3613, 1.3096, 1.0022],
+ device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0435, 0.0519, 0.0542, 0.0442, 0.0462, 0.0473, 0.0471],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:41:40,049 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6160, 1.1759, 1.5683, 2.0297, 1.8149, 1.5356, 1.5631, 1.6246],
+ device='cuda:0'), covar=tensor([0.6746, 0.8859, 0.8790, 0.9080, 0.8212, 1.1161, 1.0827, 0.9266],
+ device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0435, 0.0519, 0.0542, 0.0442, 0.0462, 0.0473, 0.0471],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:41:40,584 INFO [finetune.py:1010] (0/7) Epoch 7, validation: loss=0.1559, simple_loss=0.2289, pruned_loss=0.04148, over 2265189.00 frames.
+2023-04-26 19:41:40,598 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB
+2023-04-26 19:41:52,682 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4698, 0.6866, 1.2902, 1.8177, 1.5585, 1.3862, 1.3534, 1.4498],
+ device='cuda:0'), covar=tensor([0.7254, 0.9393, 0.9422, 1.0547, 0.8572, 1.1159, 1.1346, 0.9406],
+ device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0434, 0.0518, 0.0540, 0.0441, 0.0461, 0.0472, 0.0470],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:41:59,641 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8850, 3.8196, 2.7455, 4.4777, 3.8783, 3.9373, 1.6076, 3.8959],
+ device='cuda:0'), covar=tensor([0.1486, 0.1299, 0.2808, 0.1301, 0.2965, 0.1601, 0.5690, 0.2045],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0218, 0.0250, 0.0308, 0.0302, 0.0251, 0.0273, 0.0271],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 19:42:15,397 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=37404.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 19:42:23,694 INFO [finetune.py:976] (0/7) Epoch 7, batch 3050, loss[loss=0.2097, simple_loss=0.2758, pruned_loss=0.07179, over 4825.00 frames. ], tot_loss[loss=0.204, simple_loss=0.268, pruned_loss=0.07001, over 956036.08 frames. ], batch size: 39, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:42:30,253 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=37425.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:42:32,451 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2011, 2.9762, 2.5646, 2.7452, 2.1761, 2.4700, 2.4492, 2.0379],
+ device='cuda:0'), covar=tensor([0.2068, 0.1059, 0.0790, 0.1021, 0.2348, 0.1199, 0.1755, 0.2525],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0324, 0.0235, 0.0297, 0.0318, 0.0275, 0.0265, 0.0288],
+ device='cuda:0'), out_proj_covar=tensor([1.2427e-04, 1.3118e-04, 9.5201e-05, 1.1894e-04, 1.3095e-04, 1.1115e-04,
+ 1.0846e-04, 1.1573e-04], device='cuda:0')
+2023-04-26 19:42:34,151 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.257e+01 1.766e+02 2.163e+02 2.758e+02 4.245e+02, threshold=4.325e+02, percent-clipped=0.0
+2023-04-26 19:42:36,032 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6083, 3.4357, 0.8189, 2.0868, 1.9576, 2.3894, 1.9708, 1.0823],
+ device='cuda:0'), covar=tensor([0.1318, 0.1100, 0.2114, 0.1214, 0.1094, 0.1106, 0.1479, 0.1985],
+ device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0255, 0.0143, 0.0125, 0.0136, 0.0156, 0.0121, 0.0124],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 19:42:54,538 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37461.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 19:42:57,505 INFO [finetune.py:976] (0/7) Epoch 7, batch 3100, loss[loss=0.2133, simple_loss=0.2572, pruned_loss=0.08469, over 4929.00 frames. ], tot_loss[loss=0.203, simple_loss=0.2667, pruned_loss=0.06964, over 956089.80 frames. ], batch size: 33, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:43:14,111 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.0171, 3.3805, 1.2092, 1.9876, 2.6117, 2.0240, 4.6912, 2.4330],
+ device='cuda:0'), covar=tensor([0.0505, 0.0647, 0.0767, 0.1244, 0.0498, 0.0910, 0.0310, 0.0612],
+ device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0051, 0.0048, 0.0052, 0.0053, 0.0079, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-26 19:43:16,620 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.66 vs. limit=2.0
+2023-04-26 19:43:26,065 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=37509.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 19:43:31,232 INFO [finetune.py:976] (0/7) Epoch 7, batch 3150, loss[loss=0.1952, simple_loss=0.2629, pruned_loss=0.06375, over 4902.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2652, pruned_loss=0.06951, over 955070.43 frames. ], batch size: 32, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:43:52,227 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.638e+02 2.016e+02 2.443e+02 5.326e+02, threshold=4.032e+02, percent-clipped=1.0
+2023-04-26 19:43:53,646 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9250, 1.7006, 2.0851, 2.3745, 2.0337, 1.7915, 1.8782, 1.9393],
+ device='cuda:0'), covar=tensor([0.8133, 1.1293, 1.2535, 1.0865, 1.0011, 1.5103, 1.4624, 1.3274],
+ device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0432, 0.0518, 0.0538, 0.0441, 0.0460, 0.0472, 0.0470],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:44:36,861 INFO [finetune.py:976] (0/7) Epoch 7, batch 3200, loss[loss=0.2177, simple_loss=0.2776, pruned_loss=0.0789, over 4856.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2617, pruned_loss=0.06821, over 953554.51 frames. ], batch size: 44, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:44:37,546 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37567.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:44:58,326 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1950, 1.2737, 1.3873, 1.5724, 1.5201, 1.2774, 0.8616, 1.3233],
+ device='cuda:0'), covar=tensor([0.0940, 0.1274, 0.0810, 0.0661, 0.0736, 0.0921, 0.1022, 0.0694],
+ device='cuda:0'), in_proj_covar=tensor([0.0200, 0.0204, 0.0181, 0.0177, 0.0179, 0.0192, 0.0162, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:45:42,205 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37614.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:45:43,282 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=37615.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:45:43,827 INFO [finetune.py:976] (0/7) Epoch 7, batch 3250, loss[loss=0.1921, simple_loss=0.2506, pruned_loss=0.06682, over 4711.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2624, pruned_loss=0.0689, over 955007.13 frames. ], batch size: 23, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:45:51,284 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6058, 1.6831, 1.7680, 1.3545, 1.8371, 1.4809, 2.3160, 1.5677],
+ device='cuda:0'), covar=tensor([0.3927, 0.1705, 0.4330, 0.2795, 0.1548, 0.2139, 0.1412, 0.4316],
+ device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0350, 0.0429, 0.0361, 0.0387, 0.0381, 0.0382, 0.0416],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:45:55,380 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37624.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:46:03,766 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.639e+01 1.714e+02 1.975e+02 2.453e+02 9.656e+02, threshold=3.950e+02, percent-clipped=3.0
+2023-04-26 19:46:23,686 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4512, 1.7341, 1.3329, 0.9131, 1.1811, 1.1206, 1.3213, 1.0925],
+ device='cuda:0'), covar=tensor([0.1951, 0.1420, 0.1735, 0.2189, 0.2634, 0.2277, 0.1326, 0.2233],
+ device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0218, 0.0173, 0.0205, 0.0208, 0.0185, 0.0165, 0.0189],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-26 19:46:41,389 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37660.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:46:42,586 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=37662.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:46:45,460 INFO [finetune.py:976] (0/7) Epoch 7, batch 3300, loss[loss=0.1953, simple_loss=0.2418, pruned_loss=0.07443, over 4775.00 frames. ], tot_loss[loss=0.2058, simple_loss=0.268, pruned_loss=0.07176, over 956969.81 frames. ], batch size: 23, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:46:55,091 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.3488, 1.3813, 1.4149, 1.1317, 1.4238, 1.0646, 1.8148, 1.3326],
+ device='cuda:0'), covar=tensor([0.3977, 0.1639, 0.5340, 0.2499, 0.1583, 0.2279, 0.1736, 0.4684],
+ device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0351, 0.0431, 0.0361, 0.0388, 0.0382, 0.0383, 0.0417],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 19:47:05,289 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0
+2023-04-26 19:47:48,146 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=37708.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:47:57,366 INFO [finetune.py:976] (0/7) Epoch 7, batch 3350, loss[loss=0.1653, simple_loss=0.2329, pruned_loss=0.04887, over 4744.00 frames. ], tot_loss[loss=0.2068, simple_loss=0.2698, pruned_loss=0.07192, over 956230.59 frames. ], batch size: 27, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:47:59,882 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=37720.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:48:12,882 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.789e+02 2.091e+02 2.525e+02 4.436e+02, threshold=4.182e+02, percent-clipped=1.0
+2023-04-26 19:48:36,823 INFO [finetune.py:976] (0/7) Epoch 7, batch 3400, loss[loss=0.2481, simple_loss=0.3012, pruned_loss=0.09754, over 4726.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.2729, pruned_loss=0.07345, over 955593.09 frames. ], batch size: 59, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:49:02,187 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6291, 2.1006, 1.2405, 1.3713, 2.1532, 1.5005, 1.4367, 1.5638],
+ device='cuda:0'), covar=tensor([0.0532, 0.0372, 0.0330, 0.0578, 0.0252, 0.0527, 0.0542, 0.0629],
+ device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0037, 0.0048, 0.0048, 0.0049],
+ device='cuda:0')
+2023-04-26 19:49:03,588 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-04-26 19:49:36,094 INFO [finetune.py:976] (0/7) Epoch 7, batch 3450, loss[loss=0.2212, simple_loss=0.2843, pruned_loss=0.07907, over 4863.00 frames. ], tot_loss[loss=0.208, simple_loss=0.2718, pruned_loss=0.0721, over 954265.60 frames. ], batch size: 34, lr: 3.87e-03, grad_scale: 32.0
+2023-04-26 19:49:55,704 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.199e+01 1.617e+02 2.034e+02 2.503e+02 3.981e+02, threshold=4.069e+02, percent-clipped=0.0
+2023-04-26 19:49:57,595 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.01 vs. limit=5.0
+2023-04-26 19:50:37,111 INFO [finetune.py:976] (0/7) Epoch 7, batch 3500, loss[loss=0.1763, simple_loss=0.237, pruned_loss=0.05779, over 4827.00 frames. ], tot_loss[loss=0.2052, simple_loss=0.2686, pruned_loss=0.07093, over 951863.66 frames. ], batch size: 38, lr: 3.86e-03, grad_scale: 32.0
+2023-04-26 19:50:41,012 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.63 vs. limit=2.0
+2023-04-26 19:50:45,083 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4607, 1.2053, 1.6088, 1.5790, 1.3290, 1.2157, 1.2893, 0.7353],
+ device='cuda:0'), covar=tensor([0.0556, 0.0989, 0.0481, 0.0669, 0.0877, 0.1260, 0.0676, 0.0868],
+ device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0074, 0.0073, 0.0067, 0.0077, 0.0096, 0.0080, 0.0076],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 19:50:58,168 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3854, 3.2685, 0.9341, 1.7927, 1.8278, 2.2517, 1.8852, 0.9580],
+ device='cuda:0'), covar=tensor([0.1447, 0.0881, 0.1956, 0.1345, 0.1082, 0.1056, 0.1525, 0.2027],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0254, 0.0142, 0.0124, 0.0135, 0.0155, 0.0120, 0.0123],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 19:51:17,010 INFO [finetune.py:976] (0/7) Epoch 7, batch 3550, loss[loss=0.191, simple_loss=0.2555, pruned_loss=0.06327, over 4797.00 frames. ], tot_loss[loss=0.202, simple_loss=0.265, pruned_loss=0.06951, over 952103.23 frames. ], batch size: 25, lr: 3.86e-03, grad_scale: 32.0
+2023-04-26 19:51:27,514 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=37924.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:51:36,394 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.609e+02 1.946e+02 2.387e+02 4.801e+02, threshold=3.892e+02, percent-clipped=2.0
+2023-04-26 19:52:01,352 INFO [finetune.py:976] (0/7) Epoch 7, batch 3600, loss[loss=0.198, simple_loss=0.2654, pruned_loss=0.06525, over 4840.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2626, pruned_loss=0.06834, over 954438.67 frames. ], batch size: 47, lr: 3.86e-03, grad_scale: 32.0
+2023-04-26 19:52:05,027 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=37972.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:52:09,401 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=37979.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:52:23,635 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-38000.pt
+2023-04-26 19:52:35,872 INFO [finetune.py:976] (0/7) Epoch 7, batch 3650, loss[loss=0.2925, simple_loss=0.3425, pruned_loss=0.1212, over 4747.00 frames. ], tot_loss[loss=0.2022, simple_loss=0.2654, pruned_loss=0.06953, over 954397.16 frames. ], batch size: 59, lr: 3.86e-03, grad_scale: 32.0
+2023-04-26 19:52:38,358 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38020.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:52:44,353 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.731e+02 1.989e+02 2.580e+02 1.075e+03, threshold=3.977e+02, percent-clipped=3.0
+2023-04-26 19:52:51,036 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38040.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:53:04,434 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0
+2023-04-26 19:53:08,981 INFO [finetune.py:976] (0/7) Epoch 7, batch 3700, loss[loss=0.1681, simple_loss=0.2433, pruned_loss=0.04639, over 4762.00 frames. ], tot_loss[loss=0.2039, simple_loss=0.2676, pruned_loss=0.07007, over 953340.84 frames. ], batch size: 27, lr: 3.86e-03, grad_scale: 32.0
+2023-04-26 19:53:10,238 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=38068.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 19:53:41,669 INFO [finetune.py:976] (0/7) Epoch 7, batch 3750, loss[loss=0.2448, simple_loss=0.2921, pruned_loss=0.09874, over 4917.00 frames. ], tot_loss[loss=0.2054, simple_loss=0.269, pruned_loss=0.07094, over 954712.23 frames. ], batch size: 33, lr: 3.86e-03, grad_scale: 32.0
+2023-04-26 19:53:45,300 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3507, 1.5198, 1.6029, 1.8106, 1.6222, 1.8410, 1.7391, 1.7026],
+ device='cuda:0'), covar=tensor([0.5595, 0.8372, 0.6949, 0.6698, 0.7774, 1.1093, 0.8613, 0.7805],
+ device='cuda:0'), in_proj_covar=tensor([0.0321, 0.0391, 0.0316, 0.0326, 0.0342, 0.0406, 0.0370, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 19:53:50,668 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.250e+02 1.891e+02 2.141e+02 2.716e+02 4.079e+02, threshold=4.281e+02, percent-clipped=1.0
+2023-04-26 19:54:31,493 INFO [finetune.py:976] (0/7) Epoch 7, batch 3800, loss[loss=0.1916, simple_loss=0.2619, pruned_loss=0.06068, over 4785.00 frames. ], tot_loss[loss=0.2072, simple_loss=0.2709, pruned_loss=0.07171, over 953904.90 frames.
], batch size: 51, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 19:54:42,850 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8273, 1.9914, 2.0697, 2.2098, 2.0817, 2.3172, 2.1642, 2.1143], + device='cuda:0'), covar=tensor([0.4906, 0.7546, 0.7087, 0.6215, 0.7288, 0.9288, 0.8201, 0.7368], + device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0392, 0.0318, 0.0327, 0.0343, 0.0408, 0.0371, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 19:55:36,932 INFO [finetune.py:976] (0/7) Epoch 7, batch 3850, loss[loss=0.1813, simple_loss=0.2556, pruned_loss=0.05356, over 4716.00 frames. ], tot_loss[loss=0.204, simple_loss=0.2679, pruned_loss=0.07, over 950317.80 frames. ], batch size: 59, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 19:55:48,306 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.3970, 1.3961, 1.4647, 1.1048, 1.4364, 1.1570, 1.8213, 1.3934], + device='cuda:0'), covar=tensor([0.3588, 0.1637, 0.5159, 0.2613, 0.1522, 0.2242, 0.1526, 0.4839], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0355, 0.0436, 0.0364, 0.0392, 0.0385, 0.0386, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 19:55:56,087 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.647e+02 1.925e+02 2.333e+02 3.999e+02, threshold=3.850e+02, percent-clipped=0.0 +2023-04-26 19:56:07,072 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.94 vs. limit=5.0 +2023-04-26 19:56:41,861 INFO [finetune.py:976] (0/7) Epoch 7, batch 3900, loss[loss=0.1853, simple_loss=0.2435, pruned_loss=0.06355, over 4908.00 frames. ], tot_loss[loss=0.2026, simple_loss=0.2654, pruned_loss=0.06985, over 952276.27 frames. ], batch size: 46, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 19:57:12,468 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1358, 2.6031, 1.0376, 1.4543, 2.0263, 1.2349, 3.6590, 1.9538], + device='cuda:0'), covar=tensor([0.0699, 0.0830, 0.0867, 0.1352, 0.0508, 0.1081, 0.0269, 0.0617], + device='cuda:0'), in_proj_covar=tensor([0.0053, 0.0069, 0.0051, 0.0048, 0.0053, 0.0054, 0.0080, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 19:57:47,851 INFO [finetune.py:976] (0/7) Epoch 7, batch 3950, loss[loss=0.1986, simple_loss=0.2703, pruned_loss=0.06344, over 4922.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2622, pruned_loss=0.06821, over 953547.91 frames. ], batch size: 37, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 19:58:09,819 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.650e+02 1.940e+02 2.337e+02 4.288e+02, threshold=3.879e+02, percent-clipped=1.0 +2023-04-26 19:58:18,648 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38335.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:58:43,714 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38360.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 19:58:53,666 INFO [finetune.py:976] (0/7) Epoch 7, batch 4000, loss[loss=0.2077, simple_loss=0.2668, pruned_loss=0.07434, over 4902.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2617, pruned_loss=0.06819, over 954347.39 frames. 
], batch size: 35, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 19:59:36,958 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6947, 1.1205, 1.5491, 1.9856, 1.7633, 1.5675, 1.5813, 1.6507], + device='cuda:0'), covar=tensor([0.6670, 0.8943, 0.8675, 0.9933, 0.8711, 1.0425, 1.1069, 0.8956], + device='cuda:0'), in_proj_covar=tensor([0.0411, 0.0429, 0.0514, 0.0536, 0.0441, 0.0461, 0.0471, 0.0467], + device='cuda:0'), out_proj_covar=tensor([9.9994e-05, 1.0638e-04, 1.1592e-04, 1.2694e-04, 1.0738e-04, 1.1150e-04, + 1.1339e-04, 1.1386e-04], device='cuda:0') +2023-04-26 20:00:00,683 INFO [finetune.py:976] (0/7) Epoch 7, batch 4050, loss[loss=0.2086, simple_loss=0.2695, pruned_loss=0.07384, over 4937.00 frames. ], tot_loss[loss=0.2032, simple_loss=0.266, pruned_loss=0.07021, over 954900.03 frames. ], batch size: 33, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:00:10,079 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38421.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:00:21,445 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.926e+01 1.887e+02 2.250e+02 2.854e+02 6.950e+02, threshold=4.500e+02, percent-clipped=7.0 +2023-04-26 20:00:27,005 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4122, 1.6457, 1.3649, 0.9162, 1.1366, 1.0996, 1.3557, 1.0582], + device='cuda:0'), covar=tensor([0.1977, 0.1478, 0.2005, 0.2268, 0.2796, 0.2338, 0.1336, 0.2436], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0218, 0.0173, 0.0205, 0.0208, 0.0185, 0.0164, 0.0190], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 20:01:01,406 INFO [finetune.py:976] (0/7) Epoch 7, batch 4100, loss[loss=0.1955, simple_loss=0.2734, pruned_loss=0.05883, over 4753.00 frames. ], tot_loss[loss=0.2043, simple_loss=0.2675, pruned_loss=0.07055, over 955151.74 frames. ], batch size: 54, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:01:20,916 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-26 20:02:02,982 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-26 20:02:13,352 INFO [finetune.py:976] (0/7) Epoch 7, batch 4150, loss[loss=0.2267, simple_loss=0.2801, pruned_loss=0.08667, over 4854.00 frames. ], tot_loss[loss=0.2049, simple_loss=0.2686, pruned_loss=0.0706, over 956668.21 frames. 
], batch size: 31, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:02:28,364 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.719e+02 1.998e+02 2.310e+02 4.665e+02, threshold=3.995e+02, percent-clipped=1.0 +2023-04-26 20:02:36,954 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3341, 1.5249, 1.5094, 1.6593, 1.5466, 1.6769, 1.6259, 1.5393], + device='cuda:0'), covar=tensor([0.6038, 0.8671, 0.7472, 0.6921, 0.8097, 1.1786, 0.9797, 0.8708], + device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0392, 0.0317, 0.0328, 0.0343, 0.0407, 0.0371, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 20:02:52,725 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7857, 1.3216, 1.6219, 1.6859, 1.5601, 1.3166, 0.7724, 1.2885], + device='cuda:0'), covar=tensor([0.4071, 0.4293, 0.2048, 0.2618, 0.3202, 0.3249, 0.4993, 0.3008], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0254, 0.0219, 0.0323, 0.0215, 0.0230, 0.0238, 0.0190], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 20:02:53,446 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.00 vs. limit=5.0 +2023-04-26 20:02:57,367 INFO [finetune.py:976] (0/7) Epoch 7, batch 4200, loss[loss=0.2032, simple_loss=0.272, pruned_loss=0.06715, over 4806.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2695, pruned_loss=0.07058, over 956152.89 frames. ], batch size: 41, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:02:58,823 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.92 vs. limit=5.0 +2023-04-26 20:03:12,385 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3298, 2.6934, 1.2741, 1.6797, 2.2376, 1.4909, 3.6199, 1.9124], + device='cuda:0'), covar=tensor([0.0615, 0.0626, 0.0721, 0.1223, 0.0470, 0.0967, 0.0253, 0.0593], + device='cuda:0'), in_proj_covar=tensor([0.0053, 0.0069, 0.0051, 0.0048, 0.0053, 0.0054, 0.0080, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 20:03:29,907 INFO [finetune.py:976] (0/7) Epoch 7, batch 4250, loss[loss=0.1906, simple_loss=0.2615, pruned_loss=0.05984, over 4904.00 frames. ], tot_loss[loss=0.2038, simple_loss=0.268, pruned_loss=0.06976, over 957002.25 frames. 
], batch size: 46, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:03:40,422 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.629e+02 2.024e+02 2.405e+02 4.304e+02, threshold=4.048e+02, percent-clipped=1.0 +2023-04-26 20:03:44,043 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=38635.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:03:47,549 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0545, 2.5013, 0.9794, 1.3372, 1.8933, 1.2090, 3.0223, 1.5051], + device='cuda:0'), covar=tensor([0.0639, 0.0503, 0.0722, 0.1217, 0.0477, 0.0990, 0.0204, 0.0670], + device='cuda:0'), in_proj_covar=tensor([0.0053, 0.0069, 0.0051, 0.0048, 0.0053, 0.0054, 0.0080, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 20:03:58,941 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2181, 1.5740, 1.3424, 1.8399, 1.6548, 1.8878, 1.3669, 3.3623], + device='cuda:0'), covar=tensor([0.0684, 0.0791, 0.0817, 0.1114, 0.0628, 0.0497, 0.0785, 0.0161], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0044, 0.0040, 0.0040, 0.0039, 0.0060], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-26 20:04:03,667 INFO [finetune.py:976] (0/7) Epoch 7, batch 4300, loss[loss=0.2166, simple_loss=0.2717, pruned_loss=0.08071, over 4823.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.264, pruned_loss=0.06824, over 957185.25 frames. ], batch size: 41, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:04:26,426 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=38683.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:05:03,900 INFO [finetune.py:976] (0/7) Epoch 7, batch 4350, loss[loss=0.2034, simple_loss=0.2609, pruned_loss=0.07297, over 4914.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2601, pruned_loss=0.06627, over 958867.46 frames. ], batch size: 36, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:05:08,718 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=38716.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:05:18,153 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.164e+02 1.707e+02 2.023e+02 2.548e+02 4.531e+02, threshold=4.045e+02, percent-clipped=2.0 +2023-04-26 20:05:52,729 INFO [finetune.py:976] (0/7) Epoch 7, batch 4400, loss[loss=0.1843, simple_loss=0.2549, pruned_loss=0.05678, over 4791.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2618, pruned_loss=0.06728, over 958727.48 frames. ], batch size: 29, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:06:26,655 INFO [finetune.py:976] (0/7) Epoch 7, batch 4450, loss[loss=0.2691, simple_loss=0.3245, pruned_loss=0.1069, over 4766.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2655, pruned_loss=0.06922, over 956923.66 frames. 
], batch size: 54, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:06:36,492 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=38823.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:06:37,049 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6493, 1.3142, 4.5031, 4.2386, 3.9756, 4.2665, 4.1752, 3.9676], + device='cuda:0'), covar=tensor([0.6978, 0.6354, 0.0941, 0.1538, 0.1142, 0.1703, 0.1266, 0.1341], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0304, 0.0406, 0.0410, 0.0347, 0.0403, 0.0316, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 20:06:45,522 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.756e+02 2.089e+02 2.730e+02 5.109e+02, threshold=4.178e+02, percent-clipped=5.0 +2023-04-26 20:06:57,382 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2984, 1.5655, 1.5621, 1.7531, 1.6879, 1.8158, 1.7195, 1.6940], + device='cuda:0'), covar=tensor([0.5494, 0.8186, 0.6909, 0.6686, 0.7717, 1.1466, 0.8451, 0.7588], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0390, 0.0315, 0.0326, 0.0341, 0.0405, 0.0369, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 20:07:37,023 INFO [finetune.py:976] (0/7) Epoch 7, batch 4500, loss[loss=0.272, simple_loss=0.3263, pruned_loss=0.1089, over 4812.00 frames. ], tot_loss[loss=0.2036, simple_loss=0.2676, pruned_loss=0.06983, over 956504.63 frames. ], batch size: 41, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:07:38,920 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6655, 1.2998, 4.4876, 4.2775, 3.9523, 4.2446, 4.1579, 3.9001], + device='cuda:0'), covar=tensor([0.6692, 0.5986, 0.0966, 0.1293, 0.0971, 0.1729, 0.1136, 0.1414], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0303, 0.0405, 0.0409, 0.0346, 0.0402, 0.0315, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 20:07:59,558 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=38884.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:08:01,868 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2462, 1.4881, 1.3051, 1.7527, 1.5512, 1.6813, 1.3514, 3.0793], + device='cuda:0'), covar=tensor([0.0627, 0.0773, 0.0850, 0.1182, 0.0663, 0.0526, 0.0736, 0.0184], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0044, 0.0040, 0.0040, 0.0039, 0.0060], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0017], + device='cuda:0') +2023-04-26 20:08:44,169 INFO [finetune.py:976] (0/7) Epoch 7, batch 4550, loss[loss=0.2182, simple_loss=0.2754, pruned_loss=0.08044, over 4833.00 frames. ], tot_loss[loss=0.2053, simple_loss=0.2698, pruned_loss=0.07037, over 957116.91 frames. ], batch size: 47, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:08:58,070 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.107e+02 1.848e+02 2.064e+02 2.542e+02 5.076e+02, threshold=4.127e+02, percent-clipped=2.0 +2023-04-26 20:09:07,900 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0 +2023-04-26 20:09:50,305 INFO [finetune.py:976] (0/7) Epoch 7, batch 4600, loss[loss=0.1531, simple_loss=0.2283, pruned_loss=0.03896, over 4760.00 frames. 
], tot_loss[loss=0.2018, simple_loss=0.2671, pruned_loss=0.06819, over 957299.85 frames. ], batch size: 28, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:10:56,078 INFO [finetune.py:976] (0/7) Epoch 7, batch 4650, loss[loss=0.1516, simple_loss=0.22, pruned_loss=0.04158, over 4711.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2639, pruned_loss=0.06719, over 954484.62 frames. ], batch size: 23, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:10:56,183 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39016.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:11:06,411 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.7549, 1.7698, 1.8111, 1.3481, 2.0884, 1.4530, 2.5817, 1.5918], + device='cuda:0'), covar=tensor([0.4248, 0.1840, 0.5416, 0.3566, 0.1801, 0.2588, 0.1377, 0.5053], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0354, 0.0435, 0.0364, 0.0390, 0.0383, 0.0385, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 20:11:16,118 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.581e+02 1.907e+02 2.292e+02 5.308e+02, threshold=3.814e+02, percent-clipped=1.0 +2023-04-26 20:12:02,780 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=39064.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:12:03,960 INFO [finetune.py:976] (0/7) Epoch 7, batch 4700, loss[loss=0.1559, simple_loss=0.2286, pruned_loss=0.04162, over 4789.00 frames. ], tot_loss[loss=0.197, simple_loss=0.2609, pruned_loss=0.06656, over 954848.14 frames. ], batch size: 29, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:13:04,273 INFO [finetune.py:976] (0/7) Epoch 7, batch 4750, loss[loss=0.1565, simple_loss=0.2242, pruned_loss=0.04444, over 4778.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2588, pruned_loss=0.06638, over 955679.12 frames. ], batch size: 26, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:13:24,489 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.720e+02 2.202e+02 2.598e+02 5.909e+02, threshold=4.403e+02, percent-clipped=7.0 +2023-04-26 20:14:16,832 INFO [finetune.py:976] (0/7) Epoch 7, batch 4800, loss[loss=0.2436, simple_loss=0.3024, pruned_loss=0.09244, over 4776.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2628, pruned_loss=0.06859, over 953731.56 frames. ], batch size: 26, lr: 3.86e-03, grad_scale: 32.0 +2023-04-26 20:14:31,376 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39179.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:14:40,466 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39185.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:15:12,278 INFO [finetune.py:976] (0/7) Epoch 7, batch 4850, loss[loss=0.2027, simple_loss=0.2691, pruned_loss=0.06815, over 4836.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2664, pruned_loss=0.06955, over 955731.46 frames. 
], batch size: 47, lr: 3.86e-03, grad_scale: 64.0 +2023-04-26 20:15:21,785 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.823e+02 2.117e+02 2.667e+02 5.725e+02, threshold=4.234e+02, percent-clipped=1.0 +2023-04-26 20:15:27,354 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4112, 1.0061, 0.3705, 1.1285, 1.1343, 1.2966, 1.1844, 1.2129], + device='cuda:0'), covar=tensor([0.0582, 0.0451, 0.0487, 0.0596, 0.0325, 0.0587, 0.0536, 0.0652], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0037, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-26 20:15:28,619 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8792, 1.4023, 1.7149, 1.6337, 1.6038, 1.3649, 0.6927, 1.3329], + device='cuda:0'), covar=tensor([0.3650, 0.3893, 0.1887, 0.2785, 0.3283, 0.2910, 0.5259, 0.2772], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0254, 0.0219, 0.0324, 0.0215, 0.0229, 0.0238, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 20:15:31,633 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39246.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:15:45,481 INFO [finetune.py:976] (0/7) Epoch 7, batch 4900, loss[loss=0.1756, simple_loss=0.2398, pruned_loss=0.05571, over 4716.00 frames. ], tot_loss[loss=0.2051, simple_loss=0.2691, pruned_loss=0.07059, over 954007.14 frames. ], batch size: 23, lr: 3.86e-03, grad_scale: 64.0 +2023-04-26 20:15:48,035 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. limit=2.0 +2023-04-26 20:16:09,149 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39284.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:16:50,966 INFO [finetune.py:976] (0/7) Epoch 7, batch 4950, loss[loss=0.2294, simple_loss=0.2917, pruned_loss=0.08352, over 4797.00 frames. ], tot_loss[loss=0.205, simple_loss=0.2692, pruned_loss=0.0704, over 954527.64 frames. ], batch size: 45, lr: 3.86e-03, grad_scale: 64.0 +2023-04-26 20:17:04,979 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6354, 1.7019, 0.8879, 1.3539, 1.8324, 1.5091, 1.4448, 1.4536], + device='cuda:0'), covar=tensor([0.0552, 0.0396, 0.0407, 0.0579, 0.0292, 0.0566, 0.0527, 0.0616], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0037, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-26 20:17:06,092 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.715e+02 2.035e+02 2.513e+02 5.677e+02, threshold=4.070e+02, percent-clipped=1.0 +2023-04-26 20:17:25,348 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39345.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:17:49,759 INFO [finetune.py:976] (0/7) Epoch 7, batch 5000, loss[loss=0.1755, simple_loss=0.2392, pruned_loss=0.05589, over 4929.00 frames. ], tot_loss[loss=0.202, simple_loss=0.2663, pruned_loss=0.06884, over 952015.24 frames. ], batch size: 38, lr: 3.86e-03, grad_scale: 64.0 +2023-04-26 20:17:54,788 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. 
limit=2.0 +2023-04-26 20:18:14,780 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-26 20:18:23,433 INFO [finetune.py:976] (0/7) Epoch 7, batch 5050, loss[loss=0.2018, simple_loss=0.2468, pruned_loss=0.07839, over 4255.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2628, pruned_loss=0.06736, over 953738.02 frames. ], batch size: 65, lr: 3.85e-03, grad_scale: 64.0 +2023-04-26 20:18:25,918 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7901, 1.9310, 1.9063, 2.1631, 1.9235, 2.1483, 1.9489, 1.8674], + device='cuda:0'), covar=tensor([0.5837, 0.9782, 0.7637, 0.5833, 0.8273, 1.0715, 1.0720, 0.8860], + device='cuda:0'), in_proj_covar=tensor([0.0324, 0.0392, 0.0317, 0.0327, 0.0343, 0.0406, 0.0369, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 20:18:32,406 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.17 vs. limit=5.0 +2023-04-26 20:18:32,942 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1937, 2.0679, 2.4211, 2.6371, 2.6222, 2.0591, 1.6912, 2.2573], + device='cuda:0'), covar=tensor([0.1002, 0.1043, 0.0614, 0.0657, 0.0643, 0.1038, 0.0937, 0.0647], + device='cuda:0'), in_proj_covar=tensor([0.0201, 0.0205, 0.0183, 0.0180, 0.0180, 0.0195, 0.0163, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 20:18:33,400 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.684e+02 2.008e+02 2.415e+02 4.173e+02, threshold=4.016e+02, percent-clipped=2.0 +2023-04-26 20:18:54,452 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39463.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:18:56,691 INFO [finetune.py:976] (0/7) Epoch 7, batch 5100, loss[loss=0.2208, simple_loss=0.281, pruned_loss=0.08028, over 4815.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2594, pruned_loss=0.0662, over 954523.22 frames. ], batch size: 38, lr: 3.85e-03, grad_scale: 64.0 +2023-04-26 20:19:05,633 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39479.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:19:16,119 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-26 20:19:29,889 INFO [finetune.py:976] (0/7) Epoch 7, batch 5150, loss[loss=0.2074, simple_loss=0.2776, pruned_loss=0.06858, over 4809.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.26, pruned_loss=0.06682, over 950443.43 frames. ], batch size: 41, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:19:35,803 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39524.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:19:38,071 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=39527.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:19:40,152 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. 
limit=2.0 +2023-04-26 20:19:40,452 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.309e+02 1.798e+02 2.123e+02 2.599e+02 5.486e+02, threshold=4.247e+02, percent-clipped=4.0 +2023-04-26 20:19:41,768 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1438, 1.4114, 1.2243, 1.7082, 1.4326, 1.7895, 1.2731, 3.2642], + device='cuda:0'), covar=tensor([0.0692, 0.0853, 0.0896, 0.1285, 0.0719, 0.0558, 0.0830, 0.0181], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0041, 0.0044, 0.0040, 0.0040, 0.0039, 0.0060], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-26 20:19:47,587 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39541.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:19:48,256 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8505, 1.5628, 1.9971, 2.1766, 1.6995, 1.3237, 1.6861, 1.0882], + device='cuda:0'), covar=tensor([0.0591, 0.0904, 0.0486, 0.0576, 0.0668, 0.1403, 0.0759, 0.0948], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0074, 0.0073, 0.0067, 0.0077, 0.0096, 0.0080, 0.0075], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 20:20:03,313 INFO [finetune.py:976] (0/7) Epoch 7, batch 5200, loss[loss=0.2119, simple_loss=0.2749, pruned_loss=0.07448, over 4794.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2613, pruned_loss=0.06707, over 946992.44 frames. ], batch size: 26, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:20:08,681 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1291, 1.3087, 1.4840, 1.6318, 1.5761, 1.6980, 1.5585, 1.5242], + device='cuda:0'), covar=tensor([0.5967, 0.7364, 0.6685, 0.5995, 0.7493, 1.0698, 0.7331, 0.7156], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0390, 0.0315, 0.0325, 0.0342, 0.0406, 0.0367, 0.0328], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 20:20:30,584 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9722, 1.7481, 1.9728, 2.3040, 2.3761, 1.9207, 1.5415, 1.9497], + device='cuda:0'), covar=tensor([0.0773, 0.1034, 0.0620, 0.0519, 0.0500, 0.0804, 0.0756, 0.0607], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0203, 0.0181, 0.0178, 0.0178, 0.0193, 0.0162, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 20:20:37,216 INFO [finetune.py:976] (0/7) Epoch 7, batch 5250, loss[loss=0.2139, simple_loss=0.2596, pruned_loss=0.08413, over 4444.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2641, pruned_loss=0.0681, over 947237.79 frames. 
], batch size: 19, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:20:47,796 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.776e+02 2.030e+02 2.603e+02 8.469e+02, threshold=4.060e+02, percent-clipped=1.0 +2023-04-26 20:20:53,195 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39639.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 20:20:53,799 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39640.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:21:03,885 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9528, 2.3718, 2.7613, 3.4533, 2.7266, 2.3609, 2.1300, 2.6942], + device='cuda:0'), covar=tensor([0.3600, 0.3755, 0.1644, 0.3039, 0.3225, 0.2910, 0.4469, 0.2645], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0254, 0.0219, 0.0323, 0.0215, 0.0230, 0.0238, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 20:21:10,370 INFO [finetune.py:976] (0/7) Epoch 7, batch 5300, loss[loss=0.1981, simple_loss=0.2586, pruned_loss=0.06883, over 4314.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2666, pruned_loss=0.06941, over 947879.06 frames. ], batch size: 65, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:21:50,879 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39700.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 20:22:11,712 INFO [finetune.py:976] (0/7) Epoch 7, batch 5350, loss[loss=0.2223, simple_loss=0.2745, pruned_loss=0.08503, over 4793.00 frames. ], tot_loss[loss=0.2017, simple_loss=0.2662, pruned_loss=0.06862, over 949833.98 frames. ], batch size: 29, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:22:31,524 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.675e+02 2.001e+02 2.311e+02 3.893e+02, threshold=4.002e+02, percent-clipped=0.0 +2023-04-26 20:22:32,273 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6105, 1.6478, 0.9040, 1.3395, 1.8502, 1.4892, 1.4086, 1.4285], + device='cuda:0'), covar=tensor([0.0534, 0.0379, 0.0371, 0.0552, 0.0268, 0.0528, 0.0506, 0.0600], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0037, 0.0048, 0.0048, 0.0049], + device='cuda:0') +2023-04-26 20:22:44,552 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2292, 1.6710, 2.1120, 2.6835, 2.0345, 1.7205, 1.3333, 1.9290], + device='cuda:0'), covar=tensor([0.4214, 0.4334, 0.2207, 0.3073, 0.3627, 0.3496, 0.5394, 0.2948], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0255, 0.0219, 0.0323, 0.0216, 0.0230, 0.0238, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 20:23:17,037 INFO [finetune.py:976] (0/7) Epoch 7, batch 5400, loss[loss=0.1641, simple_loss=0.2219, pruned_loss=0.0531, over 4744.00 frames. ], tot_loss[loss=0.1999, simple_loss=0.264, pruned_loss=0.06791, over 950909.41 frames. ], batch size: 27, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:23:17,773 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39767.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:23:29,475 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. 
limit=2.0 +2023-04-26 20:23:48,438 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3868, 1.5667, 1.3943, 1.5690, 1.4200, 1.3239, 1.5090, 1.1768], + device='cuda:0'), covar=tensor([0.1801, 0.1506, 0.1061, 0.1316, 0.3354, 0.1347, 0.1603, 0.2144], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0322, 0.0234, 0.0294, 0.0317, 0.0274, 0.0262, 0.0287], + device='cuda:0'), out_proj_covar=tensor([1.2273e-04, 1.3048e-04, 9.4809e-05, 1.1790e-04, 1.3041e-04, 1.1091e-04, + 1.0742e-04, 1.1529e-04], device='cuda:0') +2023-04-26 20:24:18,139 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=39815.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:24:18,613 INFO [finetune.py:976] (0/7) Epoch 7, batch 5450, loss[loss=0.1788, simple_loss=0.2439, pruned_loss=0.0569, over 4790.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2613, pruned_loss=0.06693, over 952765.51 frames. ], batch size: 29, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:24:20,495 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39819.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:24:30,984 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4051, 1.3785, 4.1827, 3.9185, 3.7066, 3.9609, 3.9884, 3.6936], + device='cuda:0'), covar=tensor([0.6322, 0.5482, 0.1066, 0.1570, 0.1063, 0.1639, 0.1537, 0.1509], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0307, 0.0408, 0.0412, 0.0349, 0.0405, 0.0318, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 20:24:31,605 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39828.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:24:33,271 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.624e+02 1.910e+02 2.215e+02 3.764e+02, threshold=3.819e+02, percent-clipped=0.0 +2023-04-26 20:24:40,438 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39841.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:24:57,926 INFO [finetune.py:976] (0/7) Epoch 7, batch 5500, loss[loss=0.141, simple_loss=0.2138, pruned_loss=0.03417, over 4819.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2576, pruned_loss=0.06549, over 953825.65 frames. ], batch size: 25, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:25:04,117 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=39876.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:25:04,922 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-26 20:25:12,053 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=39889.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:25:31,741 INFO [finetune.py:976] (0/7) Epoch 7, batch 5550, loss[loss=0.1586, simple_loss=0.2256, pruned_loss=0.04581, over 4777.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2617, pruned_loss=0.0676, over 954514.85 frames. 
], batch size: 26, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:25:40,906 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.695e+02 2.043e+02 2.631e+02 5.173e+02, threshold=4.085e+02, percent-clipped=3.0 +2023-04-26 20:25:46,454 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=39940.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:26:01,495 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. limit=5.0 +2023-04-26 20:26:02,973 INFO [finetune.py:976] (0/7) Epoch 7, batch 5600, loss[loss=0.2333, simple_loss=0.3017, pruned_loss=0.08243, over 4815.00 frames. ], tot_loss[loss=0.2031, simple_loss=0.2673, pruned_loss=0.06948, over 954746.90 frames. ], batch size: 51, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:26:15,861 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=39988.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:26:20,004 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=39995.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 20:26:21,742 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2645, 1.6081, 1.3826, 1.7241, 1.6339, 1.9326, 1.4047, 3.7071], + device='cuda:0'), covar=tensor([0.0631, 0.0798, 0.0822, 0.1229, 0.0638, 0.0552, 0.0754, 0.0143], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0039, 0.0039, 0.0059], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-26 20:26:22,990 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-40000.pt +2023-04-26 20:26:38,931 INFO [finetune.py:976] (0/7) Epoch 7, batch 5650, loss[loss=0.1895, simple_loss=0.2601, pruned_loss=0.05943, over 4750.00 frames. ], tot_loss[loss=0.2034, simple_loss=0.2688, pruned_loss=0.069, over 955057.10 frames. ], batch size: 27, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:26:52,395 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6613, 1.0073, 1.4450, 1.5590, 1.5327, 1.6791, 1.3917, 1.4379], + device='cuda:0'), covar=tensor([0.4895, 0.7536, 0.6419, 0.5996, 0.7248, 1.0085, 0.6693, 0.6523], + device='cuda:0'), in_proj_covar=tensor([0.0321, 0.0389, 0.0316, 0.0325, 0.0341, 0.0405, 0.0367, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 20:26:53,089 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-26 20:26:53,979 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.232e+02 1.674e+02 2.052e+02 2.439e+02 4.352e+02, threshold=4.105e+02, percent-clipped=2.0 +2023-04-26 20:27:27,071 INFO [finetune.py:976] (0/7) Epoch 7, batch 5700, loss[loss=0.2374, simple_loss=0.2761, pruned_loss=0.0994, over 4131.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2636, pruned_loss=0.06778, over 937873.30 frames. 
], batch size: 18, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:27:27,725 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([5.1641, 4.9648, 3.5370, 5.8021, 5.1273, 5.1253, 2.5437, 4.8925], + device='cuda:0'), covar=tensor([0.1323, 0.0802, 0.2477, 0.0645, 0.2309, 0.1483, 0.5532, 0.2035], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0217, 0.0252, 0.0308, 0.0302, 0.0252, 0.0273, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 20:27:38,837 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0094, 0.6383, 0.8264, 0.7109, 1.1695, 0.9256, 0.7840, 0.8839], + device='cuda:0'), covar=tensor([0.1620, 0.1548, 0.1892, 0.1625, 0.1044, 0.1317, 0.1715, 0.2059], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0329, 0.0359, 0.0303, 0.0342, 0.0329, 0.0310, 0.0356], + device='cuda:0'), out_proj_covar=tensor([6.5867e-05, 7.0032e-05, 7.7877e-05, 6.2793e-05, 7.2011e-05, 7.0800e-05, + 6.7044e-05, 7.6398e-05], device='cuda:0') +2023-04-26 20:27:59,535 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-7.pt +2023-04-26 20:28:14,051 INFO [finetune.py:976] (0/7) Epoch 8, batch 0, loss[loss=0.2199, simple_loss=0.2808, pruned_loss=0.07951, over 4915.00 frames. ], tot_loss[loss=0.2199, simple_loss=0.2808, pruned_loss=0.07951, over 4915.00 frames. ], batch size: 41, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:28:14,053 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 20:28:21,661 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5629, 1.3635, 1.7324, 1.6895, 1.4328, 1.2737, 1.4647, 0.9269], + device='cuda:0'), covar=tensor([0.0618, 0.0995, 0.0949, 0.0654, 0.0804, 0.1351, 0.0909, 0.1342], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0074, 0.0073, 0.0067, 0.0077, 0.0096, 0.0080, 0.0075], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 20:28:30,560 INFO [finetune.py:1010] (0/7) Epoch 8, validation: loss=0.1574, simple_loss=0.2299, pruned_loss=0.04247, over 2265189.00 frames. +2023-04-26 20:28:30,561 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-26 20:29:03,015 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40119.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:29:03,236 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-26 20:29:05,427 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40123.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:29:10,787 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.231e+02 1.752e+02 2.089e+02 2.536e+02 4.447e+02, threshold=4.178e+02, percent-clipped=1.0 +2023-04-26 20:29:20,982 INFO [finetune.py:976] (0/7) Epoch 8, batch 50, loss[loss=0.1974, simple_loss=0.2465, pruned_loss=0.07417, over 4738.00 frames. ], tot_loss[loss=0.2099, simple_loss=0.273, pruned_loss=0.07337, over 217739.00 frames. 
], batch size: 54, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:29:36,055 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=40167.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:29:38,546 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40171.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:29:41,243 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-26 20:29:42,172 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.5581, 4.4022, 3.1975, 5.2781, 4.6656, 4.6199, 2.0190, 4.4320], + device='cuda:0'), covar=tensor([0.1528, 0.0965, 0.3280, 0.0698, 0.2691, 0.1530, 0.5537, 0.2121], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0218, 0.0253, 0.0310, 0.0303, 0.0253, 0.0274, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 20:29:54,380 INFO [finetune.py:976] (0/7) Epoch 8, batch 100, loss[loss=0.2075, simple_loss=0.2714, pruned_loss=0.07177, over 4819.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2642, pruned_loss=0.06896, over 382881.67 frames. ], batch size: 30, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:29:55,096 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40195.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:30:01,238 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5119, 1.0800, 1.2418, 1.1834, 1.6651, 1.3408, 1.1280, 1.2661], + device='cuda:0'), covar=tensor([0.1506, 0.1668, 0.1927, 0.1423, 0.0886, 0.1554, 0.2043, 0.2125], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0328, 0.0357, 0.0303, 0.0341, 0.0328, 0.0309, 0.0354], + device='cuda:0'), out_proj_covar=tensor([6.5548e-05, 6.9670e-05, 7.7417e-05, 6.2802e-05, 7.1811e-05, 7.0622e-05, + 6.6724e-05, 7.5959e-05], device='cuda:0') +2023-04-26 20:30:10,497 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40218.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:30:18,316 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.606e+02 1.925e+02 2.421e+02 4.111e+02, threshold=3.850e+02, percent-clipped=0.0 +2023-04-26 20:30:28,097 INFO [finetune.py:976] (0/7) Epoch 8, batch 150, loss[loss=0.1906, simple_loss=0.25, pruned_loss=0.06562, over 4905.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2557, pruned_loss=0.06582, over 509877.99 frames. 
], batch size: 37, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:30:36,423 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40256.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:30:46,190 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4641, 2.5304, 2.1175, 2.2898, 2.6386, 2.1159, 3.5241, 2.1260], + device='cuda:0'), covar=tensor([0.4145, 0.2209, 0.4734, 0.3620, 0.2051, 0.2893, 0.1487, 0.3713], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0350, 0.0432, 0.0361, 0.0386, 0.0380, 0.0382, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 20:30:46,192 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40272.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:30:50,429 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40279.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:31:01,726 INFO [finetune.py:976] (0/7) Epoch 8, batch 200, loss[loss=0.1852, simple_loss=0.2612, pruned_loss=0.05465, over 4843.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2571, pruned_loss=0.06789, over 610040.56 frames. ], batch size: 44, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:31:02,442 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40295.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 20:31:24,740 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40330.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:31:25,243 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.700e+02 1.993e+02 2.528e+02 5.564e+02, threshold=3.985e+02, percent-clipped=2.0 +2023-04-26 20:31:26,602 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40333.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:31:34,053 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=40343.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 20:31:35,036 INFO [finetune.py:976] (0/7) Epoch 8, batch 250, loss[loss=0.1703, simple_loss=0.2352, pruned_loss=0.0527, over 4771.00 frames. ], tot_loss[loss=0.1977, simple_loss=0.2603, pruned_loss=0.06751, over 688967.36 frames. ], batch size: 26, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:31:35,821 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-04-26 20:32:05,768 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40391.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:32:07,909 INFO [finetune.py:976] (0/7) Epoch 8, batch 300, loss[loss=0.2045, simple_loss=0.2743, pruned_loss=0.06733, over 4858.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2619, pruned_loss=0.06726, over 747567.06 frames. ], batch size: 34, lr: 3.85e-03, grad_scale: 32.0 +2023-04-26 20:32:27,218 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40423.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 20:32:31,997 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.648e+02 2.026e+02 2.422e+02 3.959e+02, threshold=4.051e+02, percent-clipped=0.0 +2023-04-26 20:32:32,257 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-26 20:32:45,857 INFO [finetune.py:976] (0/7) Epoch 8, batch 350, loss[loss=0.164, simple_loss=0.2313, pruned_loss=0.0484, over 4826.00 frames. 
], tot_loss[loss=0.1991, simple_loss=0.2633, pruned_loss=0.06745, over 793593.24 frames. ], batch size: 30, lr: 3.85e-03, grad_scale: 32.0
+2023-04-26 20:33:27,702 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=40471.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:33:27,743 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40471.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:33:51,981 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40492.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:33:53,113 INFO [finetune.py:976] (0/7) Epoch 8, batch 400, loss[loss=0.196, simple_loss=0.2686, pruned_loss=0.06169, over 4758.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2639, pruned_loss=0.06724, over 827960.06 frames. ], batch size: 28, lr: 3.85e-03, grad_scale: 32.0
+2023-04-26 20:34:02,275 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0
+2023-04-26 20:34:32,773 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=40519.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:34:43,524 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7448, 1.7561, 2.0224, 2.3052, 2.2778, 1.8593, 1.5272, 1.8878],
+       device='cuda:0'), covar=tensor([0.0961, 0.1160, 0.0647, 0.0583, 0.0565, 0.0855, 0.0833, 0.0701],
+       device='cuda:0'), in_proj_covar=tensor([0.0203, 0.0210, 0.0186, 0.0183, 0.0182, 0.0197, 0.0166, 0.0191],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-26 20:34:45,208 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.623e+02 1.962e+02 2.378e+02 5.703e+02, threshold=3.923e+02, percent-clipped=1.0
+2023-04-26 20:35:05,025 INFO [finetune.py:976] (0/7) Epoch 8, batch 450, loss[loss=0.1792, simple_loss=0.2357, pruned_loss=0.06138, over 4815.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2637, pruned_loss=0.06756, over 854867.70 frames. ], batch size: 39, lr: 3.85e-03, grad_scale: 32.0
+2023-04-26 20:35:16,806 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40551.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:35:18,616 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40553.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:35:38,784 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40574.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:35:56,423 INFO [finetune.py:976] (0/7) Epoch 8, batch 500, loss[loss=0.1895, simple_loss=0.2491, pruned_loss=0.06498, over 4826.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2605, pruned_loss=0.06629, over 879368.49 frames. ], batch size: 51, lr: 3.85e-03, grad_scale: 32.0
+2023-04-26 20:36:07,323 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3999, 1.8034, 1.7868, 2.1526, 1.8778, 1.9716, 1.5929, 4.4334],
+       device='cuda:0'), covar=tensor([0.0580, 0.0740, 0.0731, 0.1092, 0.0635, 0.0557, 0.0724, 0.0091],
+       device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0040, 0.0044, 0.0040, 0.0040, 0.0039, 0.0059],
+       device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+       device='cuda:0')
+2023-04-26 20:36:15,137 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6741, 1.3767, 1.8570, 1.8904, 1.4842, 1.2317, 1.5341, 0.9486],
+       device='cuda:0'), covar=tensor([0.0636, 0.1055, 0.0565, 0.0653, 0.0841, 0.1394, 0.0838, 0.0982],
+       device='cuda:0'), in_proj_covar=tensor([0.0065, 0.0074, 0.0072, 0.0066, 0.0076, 0.0095, 0.0080, 0.0075],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-26 20:36:18,047 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4298, 2.5208, 2.8205, 3.0646, 2.8378, 2.4342, 2.1134, 2.3870],
+       device='cuda:0'), covar=tensor([0.1008, 0.0960, 0.0525, 0.0674, 0.0687, 0.0983, 0.0819, 0.0720],
+       device='cuda:0'), in_proj_covar=tensor([0.0202, 0.0209, 0.0185, 0.0182, 0.0182, 0.0196, 0.0165, 0.0190],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-26 20:36:19,267 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6804, 1.7519, 1.8568, 2.4517, 2.6037, 2.2823, 2.1273, 2.0477],
+       device='cuda:0'), covar=tensor([0.1600, 0.1956, 0.2042, 0.1763, 0.1415, 0.1932, 0.2220, 0.1967],
+       device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0326, 0.0355, 0.0301, 0.0340, 0.0326, 0.0308, 0.0352],
+       device='cuda:0'), out_proj_covar=tensor([6.5370e-05, 6.9392e-05, 7.6976e-05, 6.2400e-05, 7.1604e-05, 7.0330e-05,
+       6.6616e-05, 7.5607e-05], device='cuda:0')
+2023-04-26 20:36:37,621 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40628.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:36:39,368 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.145e+02 1.684e+02 2.034e+02 2.425e+02 5.158e+02, threshold=4.068e+02, percent-clipped=3.0
+2023-04-26 20:36:52,501 INFO [finetune.py:976] (0/7) Epoch 8, batch 550, loss[loss=0.1824, simple_loss=0.2468, pruned_loss=0.05904, over 4789.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2573, pruned_loss=0.06517, over 894907.73 frames. ], batch size: 29, lr: 3.85e-03, grad_scale: 32.0
+2023-04-26 20:36:56,968 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40651.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:37:47,166 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40686.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:37:52,036 INFO [finetune.py:976] (0/7) Epoch 8, batch 600, loss[loss=0.1816, simple_loss=0.2448, pruned_loss=0.05923, over 4822.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.259, pruned_loss=0.06642, over 909810.90 frames. ], batch size: 30, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:38:21,430 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40712.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:38:22,131 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.77 vs. limit=2.0
+2023-04-26 20:38:44,482 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.285e+02 1.754e+02 2.110e+02 2.532e+02 4.405e+02, threshold=4.220e+02, percent-clipped=2.0
+2023-04-26 20:38:54,525 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8168, 1.7282, 1.9774, 2.1464, 1.6630, 1.2764, 1.8622, 1.0662],
+       device='cuda:0'), covar=tensor([0.0797, 0.0848, 0.0670, 0.0947, 0.0937, 0.1481, 0.0851, 0.1078],
+       device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0074, 0.0072, 0.0067, 0.0076, 0.0096, 0.0080, 0.0075],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-26 20:39:02,720 INFO [finetune.py:976] (0/7) Epoch 8, batch 650, loss[loss=0.1837, simple_loss=0.2498, pruned_loss=0.05874, over 4760.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2635, pruned_loss=0.06779, over 920473.14 frames. ], batch size: 27, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:39:14,001 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-26 20:40:08,308 INFO [finetune.py:976] (0/7) Epoch 8, batch 700, loss[loss=0.1856, simple_loss=0.2585, pruned_loss=0.05636, over 4848.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2658, pruned_loss=0.06899, over 927774.51 frames. ], batch size: 44, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:40:08,436 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9633, 2.5677, 2.0110, 2.2538, 1.7677, 1.9979, 2.0596, 1.6720],
+       device='cuda:0'), covar=tensor([0.2258, 0.1253, 0.0960, 0.1370, 0.3273, 0.1450, 0.2146, 0.3017],
+       device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0320, 0.0232, 0.0293, 0.0316, 0.0273, 0.0261, 0.0285],
+       device='cuda:0'), out_proj_covar=tensor([1.2205e-04, 1.2938e-04, 9.3931e-05, 1.1753e-04, 1.3017e-04, 1.1048e-04,
+       1.0727e-04, 1.1454e-04], device='cuda:0')
+2023-04-26 20:40:12,786 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0708, 2.2359, 1.7918, 1.8815, 2.3734, 1.7211, 2.7601, 1.5962],
+       device='cuda:0'), covar=tensor([0.3965, 0.1924, 0.5703, 0.2936, 0.1601, 0.2736, 0.1161, 0.4824],
+       device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0352, 0.0434, 0.0365, 0.0389, 0.0383, 0.0386, 0.0421],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-26 20:40:55,212 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.764e+02 2.066e+02 2.587e+02 3.741e+02, threshold=4.132e+02, percent-clipped=0.0
+2023-04-26 20:41:14,341 INFO [finetune.py:976] (0/7) Epoch 8, batch 750, loss[loss=0.2478, simple_loss=0.3025, pruned_loss=0.09658, over 4822.00 frames. ], tot_loss[loss=0.2025, simple_loss=0.2667, pruned_loss=0.06922, over 935093.48 frames. ], batch size: 25, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:41:16,846 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=40848.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:41:24,332 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40851.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:41:38,709 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40865.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:41:42,266 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8613, 1.9341, 1.6682, 1.5589, 2.0486, 1.6138, 2.4085, 1.5051],
+       device='cuda:0'), covar=tensor([0.3878, 0.1693, 0.4585, 0.2933, 0.1532, 0.2487, 0.1287, 0.4172],
+       device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0347, 0.0427, 0.0360, 0.0384, 0.0378, 0.0379, 0.0414],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-26 20:41:42,880 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8378, 2.5600, 1.9671, 1.7815, 1.3462, 1.3748, 1.9268, 1.3225],
+       device='cuda:0'), covar=tensor([0.1869, 0.1468, 0.1606, 0.2011, 0.2594, 0.2148, 0.1164, 0.2239],
+       device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0216, 0.0172, 0.0204, 0.0206, 0.0185, 0.0163, 0.0189],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+       device='cuda:0')
+2023-04-26 20:41:44,645 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40874.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:41:57,693 INFO [finetune.py:976] (0/7) Epoch 8, batch 800, loss[loss=0.1476, simple_loss=0.2152, pruned_loss=0.04002, over 4798.00 frames. ], tot_loss[loss=0.2008, simple_loss=0.2656, pruned_loss=0.06796, over 941134.41 frames. ], batch size: 25, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:42:00,805 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=40899.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:42:01,452 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40900.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:42:16,686 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=40922.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:42:19,180 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40926.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:42:20,377 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40928.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:42:22,146 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.615e+02 1.856e+02 2.266e+02 4.587e+02, threshold=3.712e+02, percent-clipped=2.0
+2023-04-26 20:42:31,003 INFO [finetune.py:976] (0/7) Epoch 8, batch 850, loss[loss=0.2064, simple_loss=0.2621, pruned_loss=0.07538, over 4828.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2633, pruned_loss=0.06759, over 944438.48 frames. ], batch size: 30, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:42:37,255 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0
+2023-04-26 20:42:41,337 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=40961.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 20:42:52,265 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=40976.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:42:52,323 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1410, 2.2215, 1.3699, 1.8800, 2.3455, 2.0780, 1.9500, 1.9565],
+       device='cuda:0'), covar=tensor([0.0469, 0.0346, 0.0327, 0.0524, 0.0246, 0.0500, 0.0522, 0.0533],
+       device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+       device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0050, 0.0038, 0.0049, 0.0048, 0.0050],
+       device='cuda:0')
+2023-04-26 20:42:58,819 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=40986.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:43:03,441 INFO [finetune.py:976] (0/7) Epoch 8, batch 900, loss[loss=0.1663, simple_loss=0.2298, pruned_loss=0.05138, over 4755.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2602, pruned_loss=0.06659, over 946469.08 frames. ], batch size: 23, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:43:05,766 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=40997.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:43:12,068 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41007.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:43:19,936 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5905, 1.4640, 0.7768, 1.3080, 1.7104, 1.5100, 1.4044, 1.3920],
+       device='cuda:0'), covar=tensor([0.0516, 0.0397, 0.0407, 0.0562, 0.0294, 0.0537, 0.0486, 0.0598],
+       device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+       device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0050, 0.0038, 0.0049, 0.0048, 0.0050],
+       device='cuda:0')
+2023-04-26 20:43:28,962 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.671e+02 1.992e+02 2.459e+02 6.072e+02, threshold=3.985e+02, percent-clipped=2.0
+2023-04-26 20:43:30,876 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=41034.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:43:37,319 INFO [finetune.py:976] (0/7) Epoch 8, batch 950, loss[loss=0.2122, simple_loss=0.2663, pruned_loss=0.0791, over 4916.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2588, pruned_loss=0.06632, over 948382.94 frames. ], batch size: 38, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:43:38,038 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41045.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:43:46,389 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41058.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:44:10,578 INFO [finetune.py:976] (0/7) Epoch 8, batch 1000, loss[loss=0.1877, simple_loss=0.2576, pruned_loss=0.05891, over 4795.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2608, pruned_loss=0.06714, over 948598.93 frames. ], batch size: 25, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:44:15,996 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41102.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:44:18,390 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41106.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:44:31,373 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2643, 2.4489, 1.0465, 1.4524, 1.9952, 1.2913, 3.4483, 1.9507],
+       device='cuda:0'), covar=tensor([0.0599, 0.0614, 0.0752, 0.1246, 0.0460, 0.0970, 0.0235, 0.0557],
+       device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0050, 0.0048, 0.0052, 0.0053, 0.0079, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0007],
+       device='cuda:0')
+2023-04-26 20:44:34,382 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.78 vs. limit=5.0
+2023-04-26 20:44:35,828 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.783e+02 2.181e+02 2.591e+02 5.810e+02, threshold=4.362e+02, percent-clipped=3.0
+2023-04-26 20:44:44,111 INFO [finetune.py:976] (0/7) Epoch 8, batch 1050, loss[loss=0.1743, simple_loss=0.2427, pruned_loss=0.05297, over 4895.00 frames. ], tot_loss[loss=0.1991, simple_loss=0.2626, pruned_loss=0.0678, over 951040.78 frames. ], batch size: 35, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:44:46,606 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41148.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:44:49,609 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0469, 4.3800, 0.8384, 2.3142, 2.6579, 2.9012, 2.5949, 0.9163],
+       device='cuda:0'), covar=tensor([0.1167, 0.0848, 0.1993, 0.1223, 0.0894, 0.1020, 0.1284, 0.2107],
+       device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0254, 0.0142, 0.0124, 0.0136, 0.0155, 0.0121, 0.0123],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-26 20:44:56,174 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41163.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:45:27,977 INFO [finetune.py:976] (0/7) Epoch 8, batch 1100, loss[loss=0.2493, simple_loss=0.3103, pruned_loss=0.09414, over 4718.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2632, pruned_loss=0.06769, over 951876.06 frames. ], batch size: 54, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:45:29,266 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=41196.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:45:50,742 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41221.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:45:58,236 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.713e+02 2.125e+02 2.514e+02 5.728e+02, threshold=4.249e+02, percent-clipped=2.0
+2023-04-26 20:46:17,814 INFO [finetune.py:976] (0/7) Epoch 8, batch 1150, loss[loss=0.2282, simple_loss=0.3025, pruned_loss=0.07697, over 4898.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2649, pruned_loss=0.06819, over 951759.22 frames. ], batch size: 35, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:46:36,347 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41256.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 20:46:39,549 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-26 20:47:24,548 INFO [finetune.py:976] (0/7) Epoch 8, batch 1200, loss[loss=0.1937, simple_loss=0.2529, pruned_loss=0.06723, over 4873.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2633, pruned_loss=0.06753, over 954219.77 frames. ], batch size: 32, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:47:44,096 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41307.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:47:50,767 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9335, 1.4174, 1.4928, 1.5845, 2.0748, 1.7049, 1.3320, 1.4668],
+       device='cuda:0'), covar=tensor([0.1585, 0.1541, 0.2030, 0.1186, 0.0877, 0.1571, 0.2359, 0.1998],
+       device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0327, 0.0355, 0.0302, 0.0341, 0.0328, 0.0309, 0.0353],
+       device='cuda:0'), out_proj_covar=tensor([6.5526e-05, 6.9487e-05, 7.6851e-05, 6.2649e-05, 7.1809e-05, 7.0662e-05,
+       6.6863e-05, 7.5861e-05], device='cuda:0')
+2023-04-26 20:47:58,666 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7783, 1.6374, 1.7571, 2.1086, 2.0763, 1.7070, 1.4287, 1.8157],
+       device='cuda:0'), covar=tensor([0.0848, 0.1114, 0.0684, 0.0596, 0.0567, 0.0897, 0.0806, 0.0614],
+       device='cuda:0'), in_proj_covar=tensor([0.0200, 0.0207, 0.0183, 0.0180, 0.0179, 0.0192, 0.0163, 0.0188],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-26 20:47:59,151 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.672e+02 1.949e+02 2.366e+02 5.490e+02, threshold=3.899e+02, percent-clipped=2.0
+2023-04-26 20:48:02,906 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-04-26 20:48:08,615 INFO [finetune.py:976] (0/7) Epoch 8, batch 1250, loss[loss=0.1767, simple_loss=0.2549, pruned_loss=0.04927, over 4832.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2601, pruned_loss=0.06578, over 954140.73 frames. ], batch size: 30, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:48:14,610 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41353.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:48:15,816 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=41355.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:48:42,247 INFO [finetune.py:976] (0/7) Epoch 8, batch 1300, loss[loss=0.1953, simple_loss=0.2487, pruned_loss=0.07097, over 4823.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2566, pruned_loss=0.06429, over 954180.65 frames. ], batch size: 30, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:48:46,731 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41401.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:48:49,715 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8690, 2.1343, 1.0686, 1.5776, 2.2107, 1.7865, 1.6085, 1.8053],
+       device='cuda:0'), covar=tensor([0.0525, 0.0380, 0.0348, 0.0561, 0.0259, 0.0564, 0.0542, 0.0588],
+       device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+       device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0050, 0.0038, 0.0049, 0.0048, 0.0050],
+       device='cuda:0')
+2023-04-26 20:48:57,684 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6399, 1.2314, 1.7507, 2.0723, 1.7570, 1.5981, 1.6587, 1.6898],
+       device='cuda:0'), covar=tensor([0.6979, 0.9594, 0.9215, 0.9731, 0.8216, 1.1363, 1.1167, 1.0341],
+       device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0427, 0.0509, 0.0530, 0.0438, 0.0457, 0.0469, 0.0467],
+       device='cuda:0'), out_proj_covar=tensor([9.9593e-05, 1.0593e-04, 1.1507e-04, 1.2577e-04, 1.0655e-04, 1.1077e-04,
+       1.1299e-04, 1.1342e-04], device='cuda:0')
+2023-04-26 20:49:05,851 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.753e+02 2.019e+02 2.733e+02 6.111e+02, threshold=4.038e+02, percent-clipped=8.0
+2023-04-26 20:49:15,122 INFO [finetune.py:976] (0/7) Epoch 8, batch 1350, loss[loss=0.1659, simple_loss=0.2304, pruned_loss=0.05065, over 4820.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2563, pruned_loss=0.06418, over 952754.98 frames. ], batch size: 30, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:49:25,053 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=41458.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:49:44,873 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.80 vs. limit=5.0
+2023-04-26 20:49:48,137 INFO [finetune.py:976] (0/7) Epoch 8, batch 1400, loss[loss=0.222, simple_loss=0.29, pruned_loss=0.07702, over 4927.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2609, pruned_loss=0.06577, over 953101.62 frames. ], batch size: 33, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:50:06,398 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41521.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:50:12,930 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.248e+02 1.632e+02 2.112e+02 2.588e+02 6.346e+02, threshold=4.225e+02, percent-clipped=4.0
+2023-04-26 20:50:21,305 INFO [finetune.py:976] (0/7) Epoch 8, batch 1450, loss[loss=0.2153, simple_loss=0.2678, pruned_loss=0.08136, over 4805.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2622, pruned_loss=0.066, over 953522.54 frames. ], batch size: 25, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:50:30,572 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41556.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 20:50:31,789 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5906, 1.6496, 0.9011, 1.2643, 1.7429, 1.4535, 1.3826, 1.4361],
+       device='cuda:0'), covar=tensor([0.0531, 0.0378, 0.0401, 0.0557, 0.0307, 0.0529, 0.0500, 0.0580],
+       device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+       device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0050, 0.0038, 0.0049, 0.0048, 0.0050],
+       device='cuda:0')
+2023-04-26 20:50:34,664 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6256, 4.1636, 0.7490, 2.1975, 2.3435, 2.8267, 2.3720, 0.8804],
+       device='cuda:0'), covar=tensor([0.1441, 0.0978, 0.2228, 0.1346, 0.0996, 0.1095, 0.1571, 0.2246],
+       device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0254, 0.0142, 0.0124, 0.0136, 0.0155, 0.0121, 0.0123],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-26 20:50:38,926 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=41569.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:50:54,403 INFO [finetune.py:976] (0/7) Epoch 8, batch 1500, loss[loss=0.2087, simple_loss=0.273, pruned_loss=0.07219, over 4808.00 frames. ], tot_loss[loss=0.1992, simple_loss=0.2643, pruned_loss=0.06704, over 953877.70 frames. ], batch size: 45, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:51:02,362 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=41604.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:51:09,741 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0
+2023-04-26 20:51:25,118 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.649e+02 2.032e+02 2.388e+02 6.025e+02, threshold=4.063e+02, percent-clipped=2.0
+2023-04-26 20:51:35,302 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4674, 1.6630, 1.7073, 1.9960, 1.8337, 2.1221, 1.5357, 4.4132],
+       device='cuda:0'), covar=tensor([0.0618, 0.0764, 0.0767, 0.1191, 0.0677, 0.0581, 0.0758, 0.0161],
+       device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0040, 0.0044, 0.0040, 0.0040, 0.0039, 0.0059],
+       device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+       device='cuda:0')
+2023-04-26 20:51:38,880 INFO [finetune.py:976] (0/7) Epoch 8, batch 1550, loss[loss=0.1763, simple_loss=0.2439, pruned_loss=0.05437, over 4811.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2638, pruned_loss=0.06685, over 953032.66 frames. ], batch size: 39, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:51:51,054 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41653.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:51:58,457 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41654.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:52:47,104 INFO [finetune.py:976] (0/7) Epoch 8, batch 1600, loss[loss=0.2101, simple_loss=0.2726, pruned_loss=0.07378, over 4907.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2616, pruned_loss=0.06608, over 953581.70 frames. ], batch size: 37, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:52:56,981 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=41701.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:52:57,009 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41701.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:53:12,225 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41715.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 20:53:22,287 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.620e+02 1.919e+02 2.261e+02 4.784e+02, threshold=3.838e+02, percent-clipped=1.0
+2023-04-26 20:53:25,543 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0
+2023-04-26 20:53:29,572 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8813, 2.8620, 2.3992, 2.4893, 2.1662, 2.3366, 2.5130, 1.8101],
+       device='cuda:0'), covar=tensor([0.3023, 0.1384, 0.0963, 0.1467, 0.2860, 0.1323, 0.2190, 0.3039],
+       device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0325, 0.0235, 0.0296, 0.0321, 0.0277, 0.0264, 0.0289],
+       device='cuda:0'), out_proj_covar=tensor([1.2369e-04, 1.3123e-04, 9.5026e-05, 1.1853e-04, 1.3209e-04, 1.1178e-04,
+       1.0828e-04, 1.1601e-04], device='cuda:0')
+2023-04-26 20:53:30,671 INFO [finetune.py:976] (0/7) Epoch 8, batch 1650, loss[loss=0.2045, simple_loss=0.2641, pruned_loss=0.0725, over 4938.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.258, pruned_loss=0.06493, over 954843.21 frames. ], batch size: 33, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:53:32,035 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3480, 1.5779, 1.6051, 2.1419, 2.3423, 1.9938, 2.0024, 1.8406],
+       device='cuda:0'), covar=tensor([0.1680, 0.1833, 0.1918, 0.1628, 0.1726, 0.1762, 0.2113, 0.1810],
+       device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0327, 0.0356, 0.0303, 0.0342, 0.0329, 0.0311, 0.0355],
+       device='cuda:0'), out_proj_covar=tensor([6.5698e-05, 6.9604e-05, 7.7053e-05, 6.2860e-05, 7.1934e-05, 7.0912e-05,
+       6.7203e-05, 7.6366e-05], device='cuda:0')
+2023-04-26 20:53:33,789 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=41749.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:53:36,268 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5979, 1.4615, 4.1699, 3.8015, 3.6759, 3.9172, 4.0244, 3.6695],
+       device='cuda:0'), covar=tensor([0.7219, 0.5788, 0.1267, 0.2069, 0.1335, 0.1511, 0.1207, 0.1645],
+       device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0306, 0.0410, 0.0415, 0.0350, 0.0408, 0.0319, 0.0371],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-26 20:53:40,291 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=41758.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:53:40,520 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0
+2023-04-26 20:53:46,870 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0
+2023-04-26 20:54:03,919 INFO [finetune.py:976] (0/7) Epoch 8, batch 1700, loss[loss=0.2269, simple_loss=0.2804, pruned_loss=0.08672, over 4759.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2563, pruned_loss=0.06407, over 955275.59 frames. ], batch size: 54, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:54:11,033 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0
+2023-04-26 20:54:11,550 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=41806.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:54:29,248 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.116e+02 1.826e+02 2.142e+02 2.567e+02 5.646e+02, threshold=4.284e+02, percent-clipped=4.0
+2023-04-26 20:54:35,942 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1788, 2.6356, 1.0183, 1.3657, 2.0415, 1.2349, 3.7956, 1.8579],
+       device='cuda:0'), covar=tensor([0.0679, 0.0796, 0.0884, 0.1397, 0.0551, 0.1110, 0.0221, 0.0681],
+       device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0050, 0.0048, 0.0052, 0.0053, 0.0079, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0007],
+       device='cuda:0')
+2023-04-26 20:54:37,045 INFO [finetune.py:976] (0/7) Epoch 8, batch 1750, loss[loss=0.235, simple_loss=0.2977, pruned_loss=0.0861, over 4873.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2601, pruned_loss=0.0659, over 955246.84 frames. ], batch size: 34, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:54:40,665 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2708, 2.9923, 1.0103, 1.5536, 1.6763, 2.1347, 1.7872, 0.9613],
+       device='cuda:0'), covar=tensor([0.1582, 0.1188, 0.1947, 0.1533, 0.1199, 0.1063, 0.1658, 0.1924],
+       device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0253, 0.0142, 0.0124, 0.0136, 0.0155, 0.0120, 0.0122],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-26 20:54:58,440 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6345, 0.7270, 1.2989, 1.9340, 1.6953, 1.4902, 1.4508, 1.5235],
+       device='cuda:0'), covar=tensor([0.6562, 0.8754, 0.9204, 0.9283, 0.7991, 1.0145, 1.0707, 0.8997],
+       device='cuda:0'), in_proj_covar=tensor([0.0407, 0.0426, 0.0507, 0.0528, 0.0438, 0.0455, 0.0468, 0.0466],
+       device='cuda:0'), out_proj_covar=tensor([9.9045e-05, 1.0553e-04, 1.1458e-04, 1.2534e-04, 1.0635e-04, 1.1019e-04,
+       1.1264e-04, 1.1314e-04], device='cuda:0')
+2023-04-26 20:55:10,265 INFO [finetune.py:976] (0/7) Epoch 8, batch 1800, loss[loss=0.2022, simple_loss=0.2641, pruned_loss=0.07014, over 4766.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2633, pruned_loss=0.06693, over 956491.74 frames. ], batch size: 28, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:55:18,144 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41906.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:55:35,583 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.841e+02 2.087e+02 2.578e+02 5.698e+02, threshold=4.173e+02, percent-clipped=3.0
+2023-04-26 20:55:43,854 INFO [finetune.py:976] (0/7) Epoch 8, batch 1850, loss[loss=0.1799, simple_loss=0.2522, pruned_loss=0.05383, over 4859.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.264, pruned_loss=0.06682, over 956158.11 frames. ], batch size: 31, lr: 3.84e-03, grad_scale: 64.0
+2023-04-26 20:55:46,368 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=41948.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 20:55:58,825 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=41967.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 20:56:17,000 INFO [finetune.py:976] (0/7) Epoch 8, batch 1900, loss[loss=0.1925, simple_loss=0.268, pruned_loss=0.05855, over 4914.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2655, pruned_loss=0.0675, over 954958.81 frames. ], batch size: 37, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:56:20,813 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-42000.pt
+2023-04-26 20:56:27,967 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42009.0, num_to_drop=1, layers_to_drop={2}
+2023-04-26 20:56:28,513 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42010.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 20:56:49,885 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.688e+02 2.018e+02 2.426e+02 3.814e+02, threshold=4.036e+02, percent-clipped=0.0
+2023-04-26 20:56:56,156 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0414, 3.7117, 1.4191, 2.1753, 2.3604, 2.6292, 2.3628, 1.4458],
+       device='cuda:0'), covar=tensor([0.1182, 0.1006, 0.1798, 0.1276, 0.0923, 0.1106, 0.1344, 0.1721],
+       device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0250, 0.0140, 0.0122, 0.0134, 0.0153, 0.0119, 0.0121],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-26 20:57:08,687 INFO [finetune.py:976] (0/7) Epoch 8, batch 1950, loss[loss=0.1686, simple_loss=0.2348, pruned_loss=0.05118, over 4875.00 frames. ], tot_loss[loss=0.1984, simple_loss=0.2638, pruned_loss=0.06655, over 953706.12 frames. ], batch size: 34, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:58:13,786 INFO [finetune.py:976] (0/7) Epoch 8, batch 2000, loss[loss=0.1786, simple_loss=0.2381, pruned_loss=0.05951, over 4902.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.261, pruned_loss=0.06577, over 953679.54 frames. ], batch size: 37, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:59:06,779 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.619e+02 1.918e+02 2.281e+02 4.896e+02, threshold=3.837e+02, percent-clipped=1.0
+2023-04-26 20:59:17,309 INFO [finetune.py:976] (0/7) Epoch 8, batch 2050, loss[loss=0.1857, simple_loss=0.2429, pruned_loss=0.06425, over 4748.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2568, pruned_loss=0.06409, over 955651.38 frames. ], batch size: 54, lr: 3.84e-03, grad_scale: 32.0
+2023-04-26 20:59:47,477 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0
+2023-04-26 21:00:12,508 INFO [finetune.py:976] (0/7) Epoch 8, batch 2100, loss[loss=0.1744, simple_loss=0.239, pruned_loss=0.0549, over 4757.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2556, pruned_loss=0.06357, over 956734.27 frames. ], batch size: 26, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:00:38,851 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.610e+02 1.994e+02 2.459e+02 7.557e+02, threshold=3.988e+02, percent-clipped=2.0
+2023-04-26 21:00:45,650 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7858, 1.3708, 1.8742, 2.1939, 1.8690, 1.7351, 1.8171, 1.8349],
+       device='cuda:0'), covar=tensor([0.7272, 0.9233, 0.8767, 0.9386, 0.8380, 1.1469, 1.0935, 0.9646],
+       device='cuda:0'), in_proj_covar=tensor([0.0406, 0.0423, 0.0504, 0.0527, 0.0435, 0.0453, 0.0465, 0.0465],
+       device='cuda:0'), out_proj_covar=tensor([9.8714e-05, 1.0477e-04, 1.1401e-04, 1.2490e-04, 1.0570e-04, 1.0967e-04,
+       1.1191e-04, 1.1274e-04], device='cuda:0')
+2023-04-26 21:00:46,726 INFO [finetune.py:976] (0/7) Epoch 8, batch 2150, loss[loss=0.1789, simple_loss=0.2486, pruned_loss=0.05458, over 4910.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2576, pruned_loss=0.06382, over 956130.49 frames. ], batch size: 37, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:00:58,107 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42262.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:01:20,322 INFO [finetune.py:976] (0/7) Epoch 8, batch 2200, loss[loss=0.2025, simple_loss=0.266, pruned_loss=0.06955, over 4820.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2618, pruned_loss=0.06547, over 956641.98 frames. ], batch size: 49, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:01:26,904 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42304.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 21:01:30,512 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42310.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 21:01:44,806 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.704e+02 2.094e+02 2.427e+02 3.760e+02, threshold=4.188e+02, percent-clipped=0.0
+2023-04-26 21:01:52,972 INFO [finetune.py:976] (0/7) Epoch 8, batch 2250, loss[loss=0.1942, simple_loss=0.2757, pruned_loss=0.05638, over 4916.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2638, pruned_loss=0.06637, over 956149.42 frames. ], batch size: 42, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:02:02,507 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=42358.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:02:07,526 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-04-26 21:02:15,783 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42379.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:02:26,180 INFO [finetune.py:976] (0/7) Epoch 8, batch 2300, loss[loss=0.1614, simple_loss=0.2216, pruned_loss=0.05064, over 4086.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2647, pruned_loss=0.06624, over 956433.59 frames. ], batch size: 17, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:02:41,488 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9512, 1.4911, 1.7622, 1.5980, 1.7203, 1.4563, 0.7368, 1.3527],
+       device='cuda:0'), covar=tensor([0.3890, 0.3969, 0.1872, 0.3284, 0.3177, 0.3124, 0.5000, 0.2730],
+       device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0254, 0.0221, 0.0325, 0.0216, 0.0231, 0.0238, 0.0189],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:0')
+2023-04-26 21:02:50,915 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.665e+02 1.908e+02 2.261e+02 6.563e+02, threshold=3.817e+02, percent-clipped=1.0
+2023-04-26 21:02:56,304 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42440.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:02:59,687 INFO [finetune.py:976] (0/7) Epoch 8, batch 2350, loss[loss=0.2045, simple_loss=0.2702, pruned_loss=0.06945, over 4881.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2618, pruned_loss=0.06504, over 956196.75 frames. ], batch size: 43, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:03:32,428 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-04-26 21:03:48,469 INFO [finetune.py:976] (0/7) Epoch 8, batch 2400, loss[loss=0.1489, simple_loss=0.2178, pruned_loss=0.03995, over 4036.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2588, pruned_loss=0.06437, over 954905.59 frames. ], batch size: 17, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:04:08,563 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42506.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:04:40,674 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.697e+02 2.030e+02 2.440e+02 3.399e+02, threshold=4.060e+02, percent-clipped=0.0
+2023-04-26 21:04:44,408 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6833, 1.1679, 1.6382, 2.1266, 1.7648, 1.6005, 1.6603, 1.7055],
+       device='cuda:0'), covar=tensor([0.6828, 0.9408, 0.9897, 0.9227, 0.8275, 1.1992, 1.1596, 0.9904],
+       device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0426, 0.0508, 0.0531, 0.0439, 0.0458, 0.0471, 0.0468],
+       device='cuda:0'), out_proj_covar=tensor([9.9416e-05, 1.0561e-04, 1.1490e-04, 1.2592e-04, 1.0674e-04, 1.1084e-04,
+       1.1327e-04, 1.1346e-04], device='cuda:0')
+2023-04-26 21:04:54,581 INFO [finetune.py:976] (0/7) Epoch 8, batch 2450, loss[loss=0.1796, simple_loss=0.2445, pruned_loss=0.05734, over 4771.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2563, pruned_loss=0.06408, over 956069.59 frames. ], batch size: 26, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:05:24,340 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42562.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:05:27,970 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42567.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:06:00,484 INFO [finetune.py:976] (0/7) Epoch 8, batch 2500, loss[loss=0.2271, simple_loss=0.2949, pruned_loss=0.07965, over 4304.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2576, pruned_loss=0.06487, over 954197.67 frames. ], batch size: 65, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:06:20,315 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=42604.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 21:06:29,256 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=42610.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:06:54,512 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.868e+02 2.156e+02 2.482e+02 4.292e+02, threshold=4.312e+02, percent-clipped=2.0
+2023-04-26 21:07:08,191 INFO [finetune.py:976] (0/7) Epoch 8, batch 2550, loss[loss=0.2026, simple_loss=0.2699, pruned_loss=0.06765, over 4834.00 frames. ], tot_loss[loss=0.198, simple_loss=0.2627, pruned_loss=0.06666, over 954841.05 frames. ], batch size: 49, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:07:08,300 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7357, 1.4889, 1.6899, 2.0568, 2.0586, 1.7018, 1.3976, 1.8457],
+       device='cuda:0'), covar=tensor([0.0707, 0.0968, 0.0571, 0.0451, 0.0475, 0.0704, 0.0803, 0.0465],
+       device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0205, 0.0182, 0.0178, 0.0179, 0.0190, 0.0161, 0.0186],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-26 21:07:11,872 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42649.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:07:14,665 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=42652.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 21:07:20,185 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4742, 0.8706, 1.4851, 1.9620, 1.5930, 1.4361, 1.4365, 1.5177],
+       device='cuda:0'), covar=tensor([0.6345, 0.9524, 0.8447, 0.8939, 0.8221, 1.1373, 1.1153, 0.9751],
+       device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0427, 0.0509, 0.0531, 0.0440, 0.0459, 0.0470, 0.0469],
+       device='cuda:0'), out_proj_covar=tensor([9.9471e-05, 1.0593e-04, 1.1504e-04, 1.2611e-04, 1.0694e-04, 1.1100e-04,
+       1.1322e-04, 1.1372e-04], device='cuda:0')
+2023-04-26 21:07:41,702 INFO [finetune.py:976] (0/7) Epoch 8, batch 2600, loss[loss=0.1986, simple_loss=0.259, pruned_loss=0.06906, over 4821.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.264, pruned_loss=0.06751, over 955106.09 frames. ], batch size: 30, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:07:53,069 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42710.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:08:07,493 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.967e+01 1.665e+02 1.922e+02 2.451e+02 5.774e+02, threshold=3.843e+02, percent-clipped=4.0
+2023-04-26 21:08:09,405 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42735.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:08:14,788 INFO [finetune.py:976] (0/7) Epoch 8, batch 2650, loss[loss=0.2484, simple_loss=0.3066, pruned_loss=0.09506, over 4910.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2654, pruned_loss=0.06784, over 954625.56 frames. ], batch size: 38, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:08:48,363 INFO [finetune.py:976] (0/7) Epoch 8, batch 2700, loss[loss=0.1849, simple_loss=0.2445, pruned_loss=0.06267, over 4765.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2634, pruned_loss=0.06614, over 954331.15 frames. ], batch size: 28, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:08:53,378 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9498, 2.3560, 0.8989, 1.1646, 1.5480, 1.1991, 2.5167, 1.3905],
+       device='cuda:0'), covar=tensor([0.0659, 0.0559, 0.0674, 0.1357, 0.0468, 0.1005, 0.0276, 0.0720],
+       device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0050, 0.0048, 0.0052, 0.0052, 0.0079, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+       device='cuda:0')
+2023-04-26 21:09:14,552 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.600e+01 1.577e+02 1.842e+02 2.218e+02 2.993e+02, threshold=3.685e+02, percent-clipped=0.0
+2023-04-26 21:09:21,931 INFO [finetune.py:976] (0/7) Epoch 8, batch 2750, loss[loss=0.2021, simple_loss=0.263, pruned_loss=0.07066, over 4795.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2612, pruned_loss=0.06594, over 954470.19 frames. ], batch size: 29, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:09:31,535 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42859.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:09:34,270 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=42862.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:09:56,823 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=42879.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:10:16,378 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.06 vs. limit=5.0
+2023-04-26 21:10:17,377 INFO [finetune.py:976] (0/7) Epoch 8, batch 2800, loss[loss=0.2114, simple_loss=0.2756, pruned_loss=0.0736, over 4731.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2579, pruned_loss=0.06465, over 956919.98 frames. ], batch size: 23, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:10:30,194 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-26 21:10:51,631 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42920.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:11:10,645 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.665e+02 1.974e+02 2.383e+02 4.846e+02, threshold=3.948e+02, percent-clipped=3.0
+2023-04-26 21:11:16,189 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=42940.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:11:18,534 INFO [finetune.py:976] (0/7) Epoch 8, batch 2850, loss[loss=0.2236, simple_loss=0.284, pruned_loss=0.08167, over 4833.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2587, pruned_loss=0.06566, over 955969.90 frames. ], batch size: 39, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:11:30,281 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.21 vs. limit=5.0
+2023-04-26 21:11:56,701 INFO [finetune.py:976] (0/7) Epoch 8, batch 2900, loss[loss=0.2057, simple_loss=0.2719, pruned_loss=0.06973, over 4824.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2636, pruned_loss=0.06747, over 955473.85 frames. ], batch size: 33, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:12:05,105 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4275, 1.4245, 1.7396, 1.7419, 1.3988, 1.2046, 1.4633, 0.9847],
+       device='cuda:0'), covar=tensor([0.0823, 0.0896, 0.0582, 0.0807, 0.0954, 0.1254, 0.0925, 0.0986],
+       device='cuda:0'), in_proj_covar=tensor([0.0065, 0.0073, 0.0072, 0.0067, 0.0076, 0.0095, 0.0079, 0.0074],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-26 21:12:15,322 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43005.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:12:51,483 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.808e+02 2.091e+02 2.545e+02 3.466e+02, threshold=4.182e+02, percent-clipped=0.0
+2023-04-26 21:12:59,352 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43035.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:13:01,688 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1009, 0.6859, 0.9064, 0.6983, 1.2447, 0.9708, 0.8395, 0.9188],
+       device='cuda:0'), covar=tensor([0.1623, 0.1646, 0.1949, 0.1683, 0.0965, 0.1480, 0.1872, 0.2155],
+       device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0324, 0.0355, 0.0303, 0.0342, 0.0326, 0.0310, 0.0355],
+       device='cuda:0'), out_proj_covar=tensor([6.5589e-05, 6.8901e-05, 7.6940e-05, 6.2778e-05, 7.1997e-05, 7.0227e-05,
+       6.6929e-05, 7.6286e-05], device='cuda:0')
+2023-04-26 21:13:02,993 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0
+2023-04-26 21:13:10,702 INFO [finetune.py:976] (0/7) Epoch 8, batch 2950, loss[loss=0.2309, simple_loss=0.2994, pruned_loss=0.08115, over 4807.00 frames. ], tot_loss[loss=0.2019, simple_loss=0.2664, pruned_loss=0.06869, over 954550.29 frames. ], batch size: 40, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:13:37,376 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=43083.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:13:44,489 INFO [finetune.py:976] (0/7) Epoch 8, batch 3000, loss[loss=0.2144, simple_loss=0.286, pruned_loss=0.07143, over 4915.00 frames. ], tot_loss[loss=0.2023, simple_loss=0.2672, pruned_loss=0.06876, over 955973.11 frames. ], batch size: 38, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:13:44,490 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-26 21:13:49,162 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6488, 1.4395, 0.6549, 1.2889, 1.4230, 1.5084, 1.4037, 1.3850],
+       device='cuda:0'), covar=tensor([0.0523, 0.0407, 0.0411, 0.0577, 0.0300, 0.0538, 0.0546, 0.0607],
+       device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+       device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050],
+       device='cuda:0')
+2023-04-26 21:13:49,265 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2042, 1.5741, 1.4211, 1.8175, 1.6087, 1.6412, 1.4292, 2.9576],
+       device='cuda:0'), covar=tensor([0.0614, 0.0706, 0.0713, 0.1133, 0.0616, 0.0522, 0.0703, 0.0197],
+       device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0059],
+       device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+       device='cuda:0')
+2023-04-26 21:13:54,962 INFO [finetune.py:1010] (0/7) Epoch 8, validation: loss=0.1551, simple_loss=0.2273, pruned_loss=0.04149, over 2265189.00 frames.
+2023-04-26 21:13:54,963 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB
+2023-04-26 21:14:01,339 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-04-26 21:14:18,553 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.720e+02 2.087e+02 2.508e+02 4.995e+02, threshold=4.174e+02, percent-clipped=1.0
+2023-04-26 21:14:27,803 INFO [finetune.py:976] (0/7) Epoch 8, batch 3050, loss[loss=0.2258, simple_loss=0.2921, pruned_loss=0.07969, over 4842.00 frames. ], tot_loss[loss=0.2027, simple_loss=0.2676, pruned_loss=0.06891, over 955330.11 frames. ], batch size: 44, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:14:36,814 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. limit=5.0
+2023-04-26 21:14:39,650 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43162.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:15:00,984 INFO [finetune.py:976] (0/7) Epoch 8, batch 3100, loss[loss=0.1688, simple_loss=0.2314, pruned_loss=0.05312, over 4764.00 frames. ], tot_loss[loss=0.1995, simple_loss=0.2641, pruned_loss=0.06743, over 955843.87 frames. ], batch size: 28, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:15:11,984 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=43210.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:15:15,097 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43215.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:15:25,080 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0
+2023-04-26 21:15:25,428 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.610e+02 1.868e+02 2.269e+02 4.698e+02, threshold=3.736e+02, percent-clipped=1.0
+2023-04-26 21:15:27,879 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43235.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:15:34,186 INFO [finetune.py:976] (0/7) Epoch 8, batch 3150, loss[loss=0.1844, simple_loss=0.2508, pruned_loss=0.059, over 4765.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2606, pruned_loss=0.06552, over 954812.71 frames. ], batch size: 28, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:15:37,818 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1939, 2.9154, 0.8510, 1.4844, 1.6186, 1.9712, 1.7197, 0.9217],
+       device='cuda:0'), covar=tensor([0.1772, 0.1649, 0.2269, 0.1761, 0.1269, 0.1322, 0.1708, 0.2109],
+       device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0252, 0.0142, 0.0124, 0.0136, 0.0155, 0.0121, 0.0122],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-26 21:15:51,005 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-04-26 21:16:18,110 INFO [finetune.py:976] (0/7) Epoch 8, batch 3200, loss[loss=0.1917, simple_loss=0.2563, pruned_loss=0.06352, over 4897.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2575, pruned_loss=0.06491, over 955623.47 frames. ], batch size: 32, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:16:30,934 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43305.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:17:03,559 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.615e+02 2.007e+02 2.471e+02 4.400e+02, threshold=4.014e+02, percent-clipped=2.0
+2023-04-26 21:17:20,616 INFO [finetune.py:976] (0/7) Epoch 8, batch 3250, loss[loss=0.2561, simple_loss=0.3092, pruned_loss=0.1015, over 4799.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2579, pruned_loss=0.06488, over 954601.46 frames. ], batch size: 51, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:17:33,417 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=43353.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:17:46,396 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6208, 1.7888, 1.9150, 2.4723, 2.6524, 2.2339, 2.0859, 2.0566],
+       device='cuda:0'), covar=tensor([0.1575, 0.2335, 0.2441, 0.1837, 0.1488, 0.1893, 0.2857, 0.2125],
+       device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0328, 0.0357, 0.0305, 0.0345, 0.0329, 0.0314, 0.0358],
+       device='cuda:0'), out_proj_covar=tensor([6.6223e-05, 6.9814e-05, 7.7299e-05, 6.3348e-05, 7.2593e-05, 7.0830e-05,
+       6.7634e-05, 7.6918e-05], device='cuda:0')
+2023-04-26 21:18:26,604 INFO [finetune.py:976] (0/7) Epoch 8, batch 3300, loss[loss=0.1957, simple_loss=0.274, pruned_loss=0.05865, over 4764.00 frames. ], tot_loss[loss=0.1981, simple_loss=0.2626, pruned_loss=0.06674, over 954146.99 frames. ], batch size: 28, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:18:28,484 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4195, 1.7699, 1.2664, 0.9951, 1.1004, 1.1092, 1.2836, 1.0359],
+       device='cuda:0'), covar=tensor([0.1958, 0.1539, 0.1837, 0.2217, 0.2743, 0.2269, 0.1337, 0.2355],
+       device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0219, 0.0173, 0.0207, 0.0207, 0.0185, 0.0164, 0.0190],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+       device='cuda:0')
+2023-04-26 21:18:51,539 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0
+2023-04-26 21:18:59,194 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.779e+02 2.083e+02 2.593e+02 4.743e+02, threshold=4.166e+02, percent-clipped=4.0
+2023-04-26 21:19:01,812 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4209, 1.7066, 1.7535, 1.8813, 1.7271, 1.8872, 1.8592, 1.8436],
+       device='cuda:0'), covar=tensor([0.4331, 0.6574, 0.6155, 0.5304, 0.6786, 0.8565, 0.6449, 0.6287],
+       device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0386, 0.0317, 0.0327, 0.0343, 0.0407, 0.0368, 0.0328],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-04-26 21:19:06,530 INFO [finetune.py:976] (0/7) Epoch 8, batch 3350, loss[loss=0.1875, simple_loss=0.252, pruned_loss=0.06149, over 4797.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2648, pruned_loss=0.06763, over 953485.03 frames. ], batch size: 29, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:19:16,117 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. limit=5.0
+2023-04-26 21:19:40,384 INFO [finetune.py:976] (0/7) Epoch 8, batch 3400, loss[loss=0.2127, simple_loss=0.2789, pruned_loss=0.07326, over 4153.00 frames. ], tot_loss[loss=0.201, simple_loss=0.2663, pruned_loss=0.06787, over 952633.38 frames. ], batch size: 66, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:19:54,943 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43514.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:19:55,530 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43515.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:20:01,484 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4072, 1.7276, 2.3063, 2.9328, 2.2505, 1.7375, 1.6032, 2.0914],
+       device='cuda:0'), covar=tensor([0.3841, 0.4190, 0.2086, 0.3339, 0.3516, 0.3397, 0.5078, 0.2863],
+       device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0253, 0.0220, 0.0323, 0.0215, 0.0230, 0.0237, 0.0189],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:0')
+2023-04-26 21:20:06,649 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.715e+02 2.177e+02 2.420e+02 5.380e+02, threshold=4.353e+02, percent-clipped=2.0
+2023-04-26 21:20:08,577 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=43535.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:20:13,907 INFO [finetune.py:976] (0/7) Epoch 8, batch 3450, loss[loss=0.2007, simple_loss=0.2805, pruned_loss=0.06048, over 4864.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2659, pruned_loss=0.06763, over 953022.88 frames. ], batch size: 34, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:20:26,909 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=43563.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:20:30,252 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0780, 2.6147, 1.0223, 1.4416, 1.8606, 1.2450, 3.5429, 1.7785],
+       device='cuda:0'), covar=tensor([0.0667, 0.0679, 0.0867, 0.1259, 0.0551, 0.1002, 0.0199, 0.0639],
+       device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0050, 0.0047, 0.0051, 0.0052, 0.0078, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+       device='cuda:0')
+2023-04-26 21:20:33,174 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5006, 1.1920, 1.2396, 1.1681, 1.6923, 1.3226, 1.1149, 1.1975],
+       device='cuda:0'), covar=tensor([0.1593, 0.1467, 0.1990, 0.1676, 0.0809, 0.1499, 0.1942, 0.2152],
+       device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0327, 0.0358, 0.0306, 0.0343, 0.0328, 0.0313, 0.0358],
+       device='cuda:0'), out_proj_covar=tensor([6.6055e-05, 6.9585e-05, 7.7389e-05, 6.3529e-05, 7.2165e-05, 7.0463e-05,
+       6.7409e-05, 7.6919e-05], device='cuda:0')
+2023-04-26 21:20:35,638 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43575.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:20:36,865 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3154, 2.9335, 2.3185, 2.2957, 1.8774, 1.9365, 2.4101, 1.8909],
+       device='cuda:0'), covar=tensor([0.1365, 0.1308, 0.1360, 0.1565, 0.2113, 0.1739, 0.0971, 0.1698],
+       device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0219, 0.0173, 0.0206, 0.0207, 0.0185, 0.0164, 0.0190],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+       device='cuda:0')
+2023-04-26 21:20:39,321 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8725, 1.4221, 1.7103, 1.6963, 1.6510, 1.3832, 0.7061, 1.3221],
+       device='cuda:0'), covar=tensor([0.3948, 0.4216, 0.2164, 0.2763, 0.3387, 0.3378, 0.5181, 0.2893],
+       device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0254, 0.0221, 0.0324, 0.0215, 0.0230, 0.0238, 0.0189],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:0')
+2023-04-26 21:20:40,946 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=43583.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:20:44,676 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3976, 3.1291, 0.9936, 1.6822, 2.1481, 1.3728, 4.2736, 1.9748],
+       device='cuda:0'), covar=tensor([0.0692, 0.0802, 0.0936, 0.1316, 0.0553, 0.1071, 0.0318, 0.0660],
+       device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0050, 0.0047, 0.0052, 0.0052, 0.0079, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+       device='cuda:0')
+2023-04-26 21:20:47,610 INFO [finetune.py:976] (0/7) Epoch 8, batch 3500, loss[loss=0.1998, simple_loss=0.254, pruned_loss=0.07285, over 4742.00 frames. ], tot_loss[loss=0.1988, simple_loss=0.2634, pruned_loss=0.06706, over 952229.46 frames. ], batch size: 59, lr: 3.83e-03, grad_scale: 32.0
+2023-04-26 21:21:13,732 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.083e+02 1.646e+02 1.959e+02 2.415e+02 4.063e+02, threshold=3.918e+02, percent-clipped=0.0
+2023-04-26 21:21:21,526 INFO [finetune.py:976] (0/7) Epoch 8, batch 3550, loss[loss=0.13, simple_loss=0.1971, pruned_loss=0.03143, over 4772.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2611, pruned_loss=0.0666, over 952892.58 frames. ], batch size: 26, lr: 3.82e-03, grad_scale: 32.0
+2023-04-26 21:21:25,903 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6338, 1.4879, 0.5469, 1.2967, 1.4474, 1.5177, 1.4076, 1.3842],
+       device='cuda:0'), covar=tensor([0.0521, 0.0397, 0.0445, 0.0577, 0.0305, 0.0553, 0.0530, 0.0609],
+       device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+       device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050],
+       device='cuda:0')
+2023-04-26 21:22:22,680 INFO [finetune.py:976] (0/7) Epoch 8, batch 3600, loss[loss=0.1853, simple_loss=0.2492, pruned_loss=0.06075, over 4765.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2575, pruned_loss=0.06537, over 951676.21 frames. ], batch size: 26, lr: 3.82e-03, grad_scale: 32.0
+2023-04-26 21:22:22,796 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4951, 1.3937, 1.7650, 1.7528, 1.4356, 1.2455, 1.5542, 1.0203],
+       device='cuda:0'), covar=tensor([0.0596, 0.0791, 0.0515, 0.0706, 0.0843, 0.1087, 0.0792, 0.0816],
+       device='cuda:0'), in_proj_covar=tensor([0.0065, 0.0073, 0.0072, 0.0066, 0.0076, 0.0095, 0.0078, 0.0075],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-26 21:23:15,355 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.664e+02 2.003e+02 2.703e+02 5.988e+02, threshold=4.006e+02, percent-clipped=3.0
+2023-04-26 21:23:34,071 INFO [finetune.py:976] (0/7) Epoch 8, batch 3650, loss[loss=0.2455, simple_loss=0.3107, pruned_loss=0.09018, over 4904.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2611, pruned_loss=0.06664, over 953353.02 frames. ], batch size: 43, lr: 3.82e-03, grad_scale: 32.0
+2023-04-26 21:24:29,560 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43787.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:24:33,740 INFO [finetune.py:976] (0/7) Epoch 8, batch 3700, loss[loss=0.1822, simple_loss=0.2523, pruned_loss=0.05601, over 4894.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2647, pruned_loss=0.06788, over 952401.93 frames. ], batch size: 35, lr: 3.82e-03, grad_scale: 32.0
+2023-04-26 21:24:33,844 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0974, 1.4227, 1.2444, 1.7482, 1.5384, 1.6797, 1.2927, 3.0773],
+       device='cuda:0'), covar=tensor([0.0696, 0.0820, 0.0831, 0.1151, 0.0665, 0.0496, 0.0783, 0.0172],
+       device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0059],
+       device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+       device='cuda:0')
+2023-04-26 21:24:41,341 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-26 21:25:03,744 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.778e+02 2.044e+02 2.450e+02 4.108e+02, threshold=4.087e+02, percent-clipped=2.0
+2023-04-26 21:25:11,088 INFO [finetune.py:976] (0/7) Epoch 8, batch 3750, loss[loss=0.2027, simple_loss=0.2565, pruned_loss=0.07447, over 4866.00 frames. ], tot_loss[loss=0.2013, simple_loss=0.2663, pruned_loss=0.06809, over 952529.40 frames. ], batch size: 31, lr: 3.82e-03, grad_scale: 32.0
+2023-04-26 21:25:14,024 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43848.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:25:21,375 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.50 vs. limit=5.0
+2023-04-26 21:25:27,342 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=43870.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:25:44,746 INFO [finetune.py:976] (0/7) Epoch 8, batch 3800, loss[loss=0.1764, simple_loss=0.251, pruned_loss=0.05092, over 4822.00 frames. ], tot_loss[loss=0.2021, simple_loss=0.2677, pruned_loss=0.06824, over 953860.06 frames. ], batch size: 39, lr: 3.82e-03, grad_scale: 32.0
+2023-04-26 21:25:50,251 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0319, 2.3932, 1.0195, 1.3203, 1.7317, 1.1510, 3.3075, 1.5878],
+       device='cuda:0'), covar=tensor([0.0696, 0.0674, 0.0830, 0.1373, 0.0561, 0.1112, 0.0260, 0.0697],
+       device='cuda:0'), in_proj_covar=tensor([0.0053, 0.0068, 0.0050, 0.0048, 0.0052, 0.0053, 0.0079, 0.0052],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008],
+       device='cuda:0')
+2023-04-26 21:25:54,474 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43909.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:26:09,807 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.636e+02 2.019e+02 2.329e+02 3.676e+02, threshold=4.038e+02, percent-clipped=0.0
+2023-04-26 21:26:18,578 INFO [finetune.py:976] (0/7) Epoch 8, batch 3850, loss[loss=0.2468, simple_loss=0.2974, pruned_loss=0.09811, over 4835.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2643, pruned_loss=0.06681, over 953804.06 frames. ], batch size: 47, lr: 3.82e-03, grad_scale: 32.0
+2023-04-26 21:26:28,326 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=43959.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:26:35,120 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=43970.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 21:27:02,336 INFO [finetune.py:976] (0/7) Epoch 8, batch 3900, loss[loss=0.213, simple_loss=0.2514, pruned_loss=0.08732, over 4208.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2607, pruned_loss=0.06527, over 953567.32 frames. 
], batch size: 18, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:27:06,233 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-44000.pt +2023-04-26 21:27:28,378 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7442, 1.4154, 4.1650, 3.8783, 3.5925, 3.8868, 3.7678, 3.6640], + device='cuda:0'), covar=tensor([0.7158, 0.5567, 0.0939, 0.1382, 0.1163, 0.1736, 0.2477, 0.1393], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0304, 0.0407, 0.0409, 0.0348, 0.0402, 0.0315, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:27:35,520 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44020.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:27:48,403 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.747e+02 1.960e+02 2.337e+02 4.630e+02, threshold=3.919e+02, percent-clipped=3.0 +2023-04-26 21:28:08,675 INFO [finetune.py:976] (0/7) Epoch 8, batch 3950, loss[loss=0.2172, simple_loss=0.2778, pruned_loss=0.0783, over 4900.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2573, pruned_loss=0.06437, over 952138.32 frames. ], batch size: 35, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:28:38,133 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-26 21:28:43,483 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3253, 1.5535, 1.3207, 1.4721, 1.2560, 1.2113, 1.3910, 1.1352], + device='cuda:0'), covar=tensor([0.1502, 0.1098, 0.0883, 0.1077, 0.2967, 0.1211, 0.1361, 0.1815], + device='cuda:0'), in_proj_covar=tensor([0.0299, 0.0319, 0.0230, 0.0291, 0.0317, 0.0273, 0.0260, 0.0282], + device='cuda:0'), out_proj_covar=tensor([1.2134e-04, 1.2898e-04, 9.2740e-05, 1.1652e-04, 1.3047e-04, 1.1041e-04, + 1.0642e-04, 1.1350e-04], device='cuda:0') +2023-04-26 21:28:58,845 INFO [finetune.py:976] (0/7) Epoch 8, batch 4000, loss[loss=0.2221, simple_loss=0.2733, pruned_loss=0.0854, over 4835.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2573, pruned_loss=0.06496, over 950091.05 frames. ], batch size: 33, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:29:08,613 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5836, 1.9999, 5.6123, 5.2360, 4.8982, 5.2190, 4.7628, 4.9194], + device='cuda:0'), covar=tensor([0.5878, 0.5990, 0.0931, 0.1779, 0.1351, 0.1524, 0.1211, 0.1890], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0304, 0.0406, 0.0408, 0.0347, 0.0401, 0.0314, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:29:49,240 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.897e+01 1.638e+02 2.029e+02 2.445e+02 5.584e+02, threshold=4.057e+02, percent-clipped=1.0 +2023-04-26 21:30:02,114 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44143.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:30:02,666 INFO [finetune.py:976] (0/7) Epoch 8, batch 4050, loss[loss=0.2051, simple_loss=0.2779, pruned_loss=0.0661, over 4896.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2593, pruned_loss=0.06541, over 948401.83 frames. 
], batch size: 35, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:30:35,442 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44170.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:31:07,887 INFO [finetune.py:976] (0/7) Epoch 8, batch 4100, loss[loss=0.2034, simple_loss=0.2694, pruned_loss=0.06866, over 4812.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2614, pruned_loss=0.06579, over 949707.79 frames. ], batch size: 39, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:31:27,677 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6297, 1.6613, 0.7724, 1.3418, 1.8115, 1.5270, 1.4111, 1.3956], + device='cuda:0'), covar=tensor([0.0500, 0.0369, 0.0388, 0.0554, 0.0300, 0.0512, 0.0478, 0.0591], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0048, 0.0049], + device='cuda:0') +2023-04-26 21:31:30,743 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3277, 3.0528, 0.9164, 1.5253, 1.8890, 1.3604, 4.0757, 1.7465], + device='cuda:0'), covar=tensor([0.0680, 0.1027, 0.1007, 0.1326, 0.0597, 0.1068, 0.0320, 0.0700], + device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0050, 0.0048, 0.0052, 0.0053, 0.0079, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 21:31:41,623 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=44218.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:32:00,834 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.207e+02 1.648e+02 2.017e+02 2.380e+02 4.436e+02, threshold=4.034e+02, percent-clipped=1.0 +2023-04-26 21:32:13,835 INFO [finetune.py:976] (0/7) Epoch 8, batch 4150, loss[loss=0.1901, simple_loss=0.256, pruned_loss=0.06212, over 4864.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2637, pruned_loss=0.06648, over 953592.92 frames. ], batch size: 44, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:32:34,619 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44265.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:32:49,154 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.40 vs. limit=5.0 +2023-04-26 21:32:52,669 INFO [finetune.py:976] (0/7) Epoch 8, batch 4200, loss[loss=0.1968, simple_loss=0.2696, pruned_loss=0.06204, over 4926.00 frames. ], tot_loss[loss=0.1979, simple_loss=0.2637, pruned_loss=0.06603, over 954615.71 frames. ], batch size: 42, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:33:08,257 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44315.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:33:14,878 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-26 21:33:18,928 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.745e+02 1.959e+02 2.417e+02 4.699e+02, threshold=3.917e+02, percent-clipped=3.0 +2023-04-26 21:33:26,340 INFO [finetune.py:976] (0/7) Epoch 8, batch 4250, loss[loss=0.1932, simple_loss=0.2364, pruned_loss=0.07499, over 4328.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2611, pruned_loss=0.06509, over 954648.92 frames. 
], batch size: 19, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:33:54,189 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9711, 2.7321, 1.9871, 1.8557, 1.3739, 1.5026, 2.0403, 1.4224], + device='cuda:0'), covar=tensor([0.1812, 0.1396, 0.1679, 0.1870, 0.2650, 0.2123, 0.1136, 0.2243], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0217, 0.0172, 0.0205, 0.0206, 0.0185, 0.0163, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 21:34:00,167 INFO [finetune.py:976] (0/7) Epoch 8, batch 4300, loss[loss=0.1655, simple_loss=0.2298, pruned_loss=0.05058, over 4296.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2581, pruned_loss=0.06369, over 956194.68 frames. ], batch size: 65, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:34:26,193 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.667e+02 1.928e+02 2.256e+02 4.744e+02, threshold=3.855e+02, percent-clipped=2.0 +2023-04-26 21:34:44,386 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9692, 1.3121, 5.0916, 4.4210, 4.4773, 4.7622, 4.3058, 4.2768], + device='cuda:0'), covar=tensor([0.8402, 0.9293, 0.1338, 0.2832, 0.1698, 0.3578, 0.2492, 0.2343], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0307, 0.0409, 0.0413, 0.0349, 0.0405, 0.0316, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:34:44,413 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44443.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:34:44,911 INFO [finetune.py:976] (0/7) Epoch 8, batch 4350, loss[loss=0.1729, simple_loss=0.2344, pruned_loss=0.05574, over 4835.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2541, pruned_loss=0.06241, over 956965.14 frames. ], batch size: 33, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:35:05,911 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5630, 1.5411, 0.7446, 1.3044, 1.5587, 1.4693, 1.3686, 1.3726], + device='cuda:0'), covar=tensor([0.0544, 0.0396, 0.0416, 0.0608, 0.0315, 0.0554, 0.0511, 0.0610], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0049, 0.0038, 0.0048, 0.0048, 0.0049], + device='cuda:0') +2023-04-26 21:35:46,505 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=44491.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:35:48,243 INFO [finetune.py:976] (0/7) Epoch 8, batch 4400, loss[loss=0.2246, simple_loss=0.2818, pruned_loss=0.08368, over 4906.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2568, pruned_loss=0.0639, over 954678.46 frames. 
], batch size: 37, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:35:50,161 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4011, 2.2175, 2.5829, 2.9101, 2.9034, 2.2074, 2.0147, 2.5612], + device='cuda:0'), covar=tensor([0.0946, 0.1021, 0.0586, 0.0623, 0.0491, 0.0985, 0.0863, 0.0603], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0204, 0.0180, 0.0176, 0.0177, 0.0190, 0.0161, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:36:11,275 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44527.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:36:14,648 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.807e+02 2.142e+02 2.505e+02 5.650e+02, threshold=4.284e+02, percent-clipped=2.0 +2023-04-26 21:36:23,695 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1420, 2.2064, 1.9500, 1.8134, 2.3325, 1.9524, 2.8556, 1.7130], + device='cuda:0'), covar=tensor([0.3807, 0.1838, 0.4325, 0.3166, 0.1610, 0.2466, 0.1436, 0.4331], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0352, 0.0433, 0.0367, 0.0388, 0.0384, 0.0385, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:36:33,286 INFO [finetune.py:976] (0/7) Epoch 8, batch 4450, loss[loss=0.1887, simple_loss=0.2622, pruned_loss=0.05757, over 4824.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2606, pruned_loss=0.06512, over 954516.82 frames. ], batch size: 40, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:36:37,053 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=44550.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:36:58,010 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44565.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:37:28,050 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-26 21:37:30,509 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44588.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:37:39,768 INFO [finetune.py:976] (0/7) Epoch 8, batch 4500, loss[loss=0.2065, simple_loss=0.2859, pruned_loss=0.06355, over 4834.00 frames. ], tot_loss[loss=0.1965, simple_loss=0.2621, pruned_loss=0.06541, over 956807.36 frames. 
], batch size: 47, lr: 3.82e-03, grad_scale: 64.0 +2023-04-26 21:37:57,793 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=44611.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:37:58,973 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=44613.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:38:00,246 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=44615.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:38:30,723 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.764e+02 2.099e+02 2.413e+02 4.489e+02, threshold=4.197e+02, percent-clipped=1.0 +2023-04-26 21:38:31,473 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7212, 2.3187, 2.3350, 2.7425, 2.5382, 2.4144, 2.1679, 4.9161], + device='cuda:0'), covar=tensor([0.0594, 0.0678, 0.0695, 0.0985, 0.0560, 0.0494, 0.0670, 0.0104], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0059], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-26 21:38:43,666 INFO [finetune.py:976] (0/7) Epoch 8, batch 4550, loss[loss=0.1796, simple_loss=0.2566, pruned_loss=0.0513, over 4914.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2634, pruned_loss=0.06583, over 955398.35 frames. ], batch size: 38, lr: 3.82e-03, grad_scale: 32.0 +2023-04-26 21:38:55,366 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=44663.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:39:16,582 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2888, 1.2947, 3.8700, 3.5920, 3.4426, 3.6781, 3.6986, 3.4154], + device='cuda:0'), covar=tensor([0.7067, 0.5708, 0.1154, 0.1807, 0.1235, 0.1755, 0.1251, 0.1533], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0307, 0.0408, 0.0412, 0.0348, 0.0404, 0.0315, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:39:17,128 INFO [finetune.py:976] (0/7) Epoch 8, batch 4600, loss[loss=0.185, simple_loss=0.2414, pruned_loss=0.06428, over 4756.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2619, pruned_loss=0.06479, over 954983.58 frames. ], batch size: 27, lr: 3.82e-03, grad_scale: 32.0 +2023-04-26 21:39:43,235 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.725e+02 1.987e+02 2.350e+02 3.510e+02, threshold=3.973e+02, percent-clipped=0.0 +2023-04-26 21:39:55,902 INFO [finetune.py:976] (0/7) Epoch 8, batch 4650, loss[loss=0.2225, simple_loss=0.2767, pruned_loss=0.08417, over 4937.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2597, pruned_loss=0.06452, over 954939.49 frames. ], batch size: 33, lr: 3.82e-03, grad_scale: 32.0 +2023-04-26 21:41:01,770 INFO [finetune.py:976] (0/7) Epoch 8, batch 4700, loss[loss=0.1491, simple_loss=0.2141, pruned_loss=0.04211, over 4831.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2567, pruned_loss=0.06385, over 953862.67 frames. 
], batch size: 25, lr: 3.82e-03, grad_scale: 32.0 +2023-04-26 21:41:02,974 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8627, 1.9409, 1.7390, 1.5132, 2.1178, 1.7613, 2.5223, 1.4906], + device='cuda:0'), covar=tensor([0.3654, 0.1727, 0.4598, 0.2812, 0.1407, 0.2142, 0.1421, 0.4780], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0349, 0.0430, 0.0364, 0.0386, 0.0382, 0.0383, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:41:04,728 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3636, 1.7921, 5.6137, 5.2711, 4.9339, 5.2814, 4.8165, 5.0262], + device='cuda:0'), covar=tensor([0.6348, 0.6169, 0.0884, 0.1577, 0.0856, 0.1348, 0.1064, 0.1470], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0305, 0.0407, 0.0410, 0.0347, 0.0403, 0.0315, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:41:26,396 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.673e+02 1.965e+02 2.397e+02 5.509e+02, threshold=3.929e+02, percent-clipped=2.0 +2023-04-26 21:41:32,788 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2940, 2.1284, 2.4583, 2.7533, 2.8610, 2.1415, 1.9716, 2.3487], + device='cuda:0'), covar=tensor([0.0979, 0.1108, 0.0733, 0.0714, 0.0551, 0.0975, 0.0899, 0.0705], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0207, 0.0182, 0.0178, 0.0178, 0.0192, 0.0162, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:41:35,060 INFO [finetune.py:976] (0/7) Epoch 8, batch 4750, loss[loss=0.2777, simple_loss=0.3225, pruned_loss=0.1165, over 4127.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2566, pruned_loss=0.06435, over 954363.28 frames. ], batch size: 65, lr: 3.82e-03, grad_scale: 32.0 +2023-04-26 21:41:59,217 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44883.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:42:03,825 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-26 21:42:08,192 INFO [finetune.py:976] (0/7) Epoch 8, batch 4800, loss[loss=0.2362, simple_loss=0.3015, pruned_loss=0.08545, over 4905.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2601, pruned_loss=0.06594, over 951704.38 frames. ], batch size: 37, lr: 3.82e-03, grad_scale: 32.0 +2023-04-26 21:42:16,076 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=44906.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:42:16,224 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-26 21:42:32,274 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.751e+02 2.130e+02 2.730e+02 4.658e+02, threshold=4.261e+02, percent-clipped=2.0 +2023-04-26 21:42:40,904 INFO [finetune.py:976] (0/7) Epoch 8, batch 4850, loss[loss=0.2167, simple_loss=0.2918, pruned_loss=0.07074, over 4764.00 frames. ], tot_loss[loss=0.1989, simple_loss=0.2636, pruned_loss=0.06711, over 951750.36 frames. ], batch size: 59, lr: 3.82e-03, grad_scale: 32.0 +2023-04-26 21:43:40,817 INFO [finetune.py:976] (0/7) Epoch 8, batch 4900, loss[loss=0.1447, simple_loss=0.216, pruned_loss=0.0367, over 4818.00 frames. ], tot_loss[loss=0.2007, simple_loss=0.2649, pruned_loss=0.0682, over 952606.34 frames. 
], batch size: 25, lr: 3.82e-03, grad_scale: 32.0 +2023-04-26 21:44:34,741 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.241e+02 1.669e+02 1.942e+02 2.230e+02 4.872e+02, threshold=3.883e+02, percent-clipped=3.0 +2023-04-26 21:44:46,772 INFO [finetune.py:976] (0/7) Epoch 8, batch 4950, loss[loss=0.2248, simple_loss=0.2898, pruned_loss=0.07992, over 4815.00 frames. ], tot_loss[loss=0.2011, simple_loss=0.2659, pruned_loss=0.06817, over 952106.61 frames. ], batch size: 39, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:45:01,827 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.7462, 1.9286, 1.8306, 1.5200, 1.9672, 1.6249, 2.6175, 1.5714], + device='cuda:0'), covar=tensor([0.4374, 0.1904, 0.5355, 0.3297, 0.1818, 0.2672, 0.1444, 0.4843], + device='cuda:0'), in_proj_covar=tensor([0.0349, 0.0353, 0.0434, 0.0367, 0.0390, 0.0385, 0.0385, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 21:45:08,073 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45056.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:45:11,062 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3555, 1.6447, 1.4265, 1.4987, 1.3175, 1.3416, 1.4893, 1.2123], + device='cuda:0'), covar=tensor([0.1376, 0.1118, 0.0756, 0.0950, 0.2436, 0.1035, 0.1287, 0.1706], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0321, 0.0230, 0.0293, 0.0318, 0.0274, 0.0261, 0.0283], + device='cuda:0'), out_proj_covar=tensor([1.2195e-04, 1.2963e-04, 9.2723e-05, 1.1741e-04, 1.3078e-04, 1.1073e-04, + 1.0677e-04, 1.1374e-04], device='cuda:0') +2023-04-26 21:45:35,325 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45081.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:45:47,623 INFO [finetune.py:976] (0/7) Epoch 8, batch 5000, loss[loss=0.2013, simple_loss=0.254, pruned_loss=0.07426, over 4881.00 frames. ], tot_loss[loss=0.1996, simple_loss=0.2643, pruned_loss=0.06747, over 950045.44 frames. ], batch size: 32, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:46:04,154 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45117.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 21:46:13,678 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.645e+02 2.005e+02 2.467e+02 4.350e+02, threshold=4.011e+02, percent-clipped=3.0 +2023-04-26 21:46:15,695 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-26 21:46:19,219 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8509, 2.8173, 2.2274, 3.3097, 2.8088, 2.9215, 1.1812, 2.7737], + device='cuda:0'), covar=tensor([0.2160, 0.1569, 0.3351, 0.2866, 0.3462, 0.2119, 0.6074, 0.3084], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0217, 0.0251, 0.0307, 0.0301, 0.0250, 0.0272, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 21:46:19,283 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45142.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:46:20,392 INFO [finetune.py:976] (0/7) Epoch 8, batch 5050, loss[loss=0.1699, simple_loss=0.2385, pruned_loss=0.05063, over 4897.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2609, pruned_loss=0.06629, over 951685.06 frames. 
], batch size: 35, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:46:46,997 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45183.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:46:53,634 INFO [finetune.py:976] (0/7) Epoch 8, batch 5100, loss[loss=0.165, simple_loss=0.2403, pruned_loss=0.04484, over 4757.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2575, pruned_loss=0.0645, over 952659.77 frames. ], batch size: 27, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:47:02,070 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45206.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:47:18,716 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=45231.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:47:19,883 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.617e+02 1.880e+02 2.299e+02 5.532e+02, threshold=3.760e+02, percent-clipped=2.0 +2023-04-26 21:47:26,589 INFO [finetune.py:976] (0/7) Epoch 8, batch 5150, loss[loss=0.1946, simple_loss=0.2593, pruned_loss=0.065, over 4899.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2571, pruned_loss=0.0643, over 953763.87 frames. ], batch size: 36, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:47:32,721 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=45254.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:47:49,136 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6336, 2.1452, 1.6823, 1.8550, 1.3839, 1.6286, 1.9022, 1.4370], + device='cuda:0'), covar=tensor([0.2628, 0.2044, 0.1501, 0.2150, 0.3845, 0.1923, 0.2206, 0.3256], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0323, 0.0232, 0.0296, 0.0321, 0.0277, 0.0262, 0.0284], + device='cuda:0'), out_proj_covar=tensor([1.2262e-04, 1.3043e-04, 9.3702e-05, 1.1838e-04, 1.3183e-04, 1.1170e-04, + 1.0741e-04, 1.1400e-04], device='cuda:0') +2023-04-26 21:47:51,664 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-26 21:48:00,050 INFO [finetune.py:976] (0/7) Epoch 8, batch 5200, loss[loss=0.1871, simple_loss=0.2552, pruned_loss=0.05946, over 4822.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2596, pruned_loss=0.0648, over 953013.54 frames. 
], batch size: 25, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:48:34,088 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4494, 1.2947, 1.7002, 1.6519, 1.3199, 1.1277, 1.4472, 0.8851], + device='cuda:0'), covar=tensor([0.0608, 0.0780, 0.0503, 0.0799, 0.0860, 0.1166, 0.0628, 0.0854], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0073, 0.0072, 0.0067, 0.0076, 0.0096, 0.0078, 0.0074], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 21:48:44,629 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.724e+02 2.073e+02 2.442e+02 4.626e+02, threshold=4.147e+02, percent-clipped=2.0 +2023-04-26 21:48:46,582 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7181, 2.0372, 1.7863, 1.8537, 1.6549, 1.6537, 1.7328, 1.4764], + device='cuda:0'), covar=tensor([0.1587, 0.1146, 0.0876, 0.1145, 0.2745, 0.1276, 0.1665, 0.2158], + device='cuda:0'), in_proj_covar=tensor([0.0301, 0.0323, 0.0232, 0.0295, 0.0320, 0.0277, 0.0261, 0.0284], + device='cuda:0'), out_proj_covar=tensor([1.2211e-04, 1.3017e-04, 9.3573e-05, 1.1809e-04, 1.3158e-04, 1.1172e-04, + 1.0702e-04, 1.1377e-04], device='cuda:0') +2023-04-26 21:48:56,369 INFO [finetune.py:976] (0/7) Epoch 8, batch 5250, loss[loss=0.2093, simple_loss=0.2727, pruned_loss=0.07296, over 4908.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2621, pruned_loss=0.06525, over 952263.90 frames. ], batch size: 37, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:48:58,924 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45348.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 21:48:58,968 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4998, 1.8005, 1.7876, 1.9295, 1.8172, 1.9245, 1.9275, 1.8608], + device='cuda:0'), covar=tensor([0.5135, 0.7702, 0.6732, 0.6385, 0.7796, 1.1039, 0.8558, 0.7532], + device='cuda:0'), in_proj_covar=tensor([0.0320, 0.0381, 0.0314, 0.0324, 0.0339, 0.0401, 0.0361, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 21:50:01,142 INFO [finetune.py:976] (0/7) Epoch 8, batch 5300, loss[loss=0.1882, simple_loss=0.253, pruned_loss=0.06174, over 4217.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2627, pruned_loss=0.0652, over 952013.93 frames. ], batch size: 65, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:50:21,682 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45409.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 21:50:23,487 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45412.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 21:50:55,375 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.314e+02 1.711e+02 1.928e+02 2.396e+02 5.196e+02, threshold=3.857e+02, percent-clipped=2.0 +2023-04-26 21:50:57,916 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45437.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:51:07,772 INFO [finetune.py:976] (0/7) Epoch 8, batch 5350, loss[loss=0.1937, simple_loss=0.2502, pruned_loss=0.06867, over 4855.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2629, pruned_loss=0.06512, over 952628.91 frames. 
], batch size: 31, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:51:41,767 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2164, 2.7035, 2.3726, 2.5711, 2.1324, 2.2898, 2.3939, 1.9856], + device='cuda:0'), covar=tensor([0.1848, 0.1207, 0.0851, 0.1123, 0.2626, 0.1245, 0.1775, 0.2262], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0320, 0.0231, 0.0293, 0.0318, 0.0274, 0.0259, 0.0282], + device='cuda:0'), out_proj_covar=tensor([1.2167e-04, 1.2910e-04, 9.2942e-05, 1.1721e-04, 1.3076e-04, 1.1071e-04, + 1.0620e-04, 1.1317e-04], device='cuda:0') +2023-04-26 21:51:45,336 INFO [finetune.py:976] (0/7) Epoch 8, batch 5400, loss[loss=0.2538, simple_loss=0.2951, pruned_loss=0.1063, over 4705.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2609, pruned_loss=0.06501, over 951571.72 frames. ], batch size: 23, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:52:11,222 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.671e+02 1.868e+02 2.359e+02 4.891e+02, threshold=3.736e+02, percent-clipped=4.0 +2023-04-26 21:52:18,080 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-26 21:52:18,381 INFO [finetune.py:976] (0/7) Epoch 8, batch 5450, loss[loss=0.2057, simple_loss=0.2725, pruned_loss=0.06941, over 4819.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2586, pruned_loss=0.06424, over 954524.19 frames. ], batch size: 30, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:52:51,571 INFO [finetune.py:976] (0/7) Epoch 8, batch 5500, loss[loss=0.1684, simple_loss=0.2455, pruned_loss=0.04564, over 4806.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2557, pruned_loss=0.06321, over 955164.83 frames. ], batch size: 45, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:53:16,426 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.725e+02 1.934e+02 2.387e+02 4.697e+02, threshold=3.868e+02, percent-clipped=2.0 +2023-04-26 21:53:24,570 INFO [finetune.py:976] (0/7) Epoch 8, batch 5550, loss[loss=0.186, simple_loss=0.2498, pruned_loss=0.0611, over 4752.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2577, pruned_loss=0.06446, over 954157.76 frames. ], batch size: 26, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:53:33,812 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0011, 1.4158, 1.8504, 1.9574, 1.8156, 1.4457, 0.9044, 1.4404], + device='cuda:0'), covar=tensor([0.3878, 0.3838, 0.1878, 0.2848, 0.3199, 0.3108, 0.5049, 0.2556], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0253, 0.0221, 0.0321, 0.0215, 0.0229, 0.0237, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 21:54:06,993 INFO [finetune.py:976] (0/7) Epoch 8, batch 5600, loss[loss=0.2281, simple_loss=0.2944, pruned_loss=0.08095, over 4822.00 frames. ], tot_loss[loss=0.1963, simple_loss=0.2615, pruned_loss=0.06555, over 951981.86 frames. 
], batch size: 33, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:54:12,167 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45702.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:54:13,282 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=45704.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 21:54:17,987 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45712.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 21:54:26,852 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-26 21:54:30,164 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.723e+02 2.023e+02 2.514e+02 5.159e+02, threshold=4.046e+02, percent-clipped=6.0 +2023-04-26 21:54:32,582 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=45737.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:54:36,957 INFO [finetune.py:976] (0/7) Epoch 8, batch 5650, loss[loss=0.2166, simple_loss=0.3005, pruned_loss=0.06633, over 4918.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2656, pruned_loss=0.06666, over 953207.55 frames. ], batch size: 42, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:54:46,839 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=45760.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:54:48,643 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45763.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:54:54,987 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45774.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:55:07,628 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=45785.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:55:18,507 INFO [finetune.py:976] (0/7) Epoch 8, batch 5700, loss[loss=0.2232, simple_loss=0.2458, pruned_loss=0.1003, over 4283.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2627, pruned_loss=0.06686, over 936614.27 frames. ], batch size: 18, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:55:40,670 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.0193, 2.4560, 2.8700, 3.6814, 2.7459, 2.4828, 2.3327, 2.8064], + device='cuda:0'), covar=tensor([0.3719, 0.3404, 0.1680, 0.2814, 0.3314, 0.2777, 0.4038, 0.2565], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0254, 0.0221, 0.0322, 0.0215, 0.0230, 0.0237, 0.0190], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 21:55:43,798 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-8.pt +2023-04-26 21:55:59,017 INFO [finetune.py:976] (0/7) Epoch 9, batch 0, loss[loss=0.2061, simple_loss=0.2689, pruned_loss=0.07162, over 4846.00 frames. ], tot_loss[loss=0.2061, simple_loss=0.2689, pruned_loss=0.07162, over 4846.00 frames. 
], batch size: 44, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:55:59,018 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 21:56:03,975 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4531, 3.0698, 0.9122, 1.7898, 2.0057, 2.2887, 1.9574, 1.0377], + device='cuda:0'), covar=tensor([0.1283, 0.1020, 0.2051, 0.1288, 0.0955, 0.0913, 0.1528, 0.1762], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0253, 0.0142, 0.0124, 0.0135, 0.0156, 0.0120, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 21:56:14,949 INFO [finetune.py:1010] (0/7) Epoch 9, validation: loss=0.1554, simple_loss=0.2289, pruned_loss=0.04093, over 2265189.00 frames. +2023-04-26 21:56:14,950 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-26 21:56:33,615 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.748e+02 2.064e+02 2.356e+02 2.984e+02, threshold=4.129e+02, percent-clipped=0.0 +2023-04-26 21:56:34,959 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45835.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:56:46,618 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0147, 2.4366, 0.9788, 1.2420, 1.7211, 1.1612, 3.0365, 1.4385], + device='cuda:0'), covar=tensor([0.0692, 0.0568, 0.0729, 0.1362, 0.0537, 0.1070, 0.0318, 0.0719], + device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0050, 0.0048, 0.0052, 0.0053, 0.0079, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0011, 0.0008], + device='cuda:0') +2023-04-26 21:57:00,185 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=45859.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:57:19,424 INFO [finetune.py:976] (0/7) Epoch 9, batch 50, loss[loss=0.2174, simple_loss=0.2821, pruned_loss=0.07631, over 4808.00 frames. ], tot_loss[loss=0.2006, simple_loss=0.2667, pruned_loss=0.06725, over 217051.39 frames. ], batch size: 39, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:57:38,667 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5921, 1.2622, 1.2684, 1.2979, 1.8451, 1.4634, 1.2415, 1.2100], + device='cuda:0'), covar=tensor([0.1515, 0.1266, 0.1750, 0.1384, 0.0667, 0.1283, 0.1811, 0.1992], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0326, 0.0354, 0.0303, 0.0341, 0.0327, 0.0311, 0.0357], + device='cuda:0'), out_proj_covar=tensor([6.6122e-05, 6.9124e-05, 7.6568e-05, 6.2735e-05, 7.1757e-05, 7.0199e-05, + 6.6910e-05, 7.6727e-05], device='cuda:0') +2023-04-26 21:57:51,854 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=45920.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:57:52,935 INFO [finetune.py:976] (0/7) Epoch 9, batch 100, loss[loss=0.1476, simple_loss=0.2182, pruned_loss=0.03852, over 4846.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2612, pruned_loss=0.06557, over 380385.22 frames. ], batch size: 49, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:58:01,517 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.187e+02 1.655e+02 1.945e+02 2.383e+02 5.251e+02, threshold=3.889e+02, percent-clipped=1.0 +2023-04-26 21:58:24,723 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-26 21:58:26,128 INFO [finetune.py:976] (0/7) Epoch 9, batch 150, loss[loss=0.1819, simple_loss=0.2472, pruned_loss=0.05834, over 4824.00 frames. 
], tot_loss[loss=0.1906, simple_loss=0.2543, pruned_loss=0.06344, over 508456.10 frames. ], batch size: 39, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:58:44,219 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-46000.pt +2023-04-26 21:58:47,798 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46004.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 21:58:59,027 INFO [finetune.py:976] (0/7) Epoch 9, batch 200, loss[loss=0.1351, simple_loss=0.2014, pruned_loss=0.03443, over 4778.00 frames. ], tot_loss[loss=0.191, simple_loss=0.254, pruned_loss=0.06395, over 608108.79 frames. ], batch size: 28, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:59:07,992 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.665e+01 1.625e+02 2.034e+02 2.353e+02 6.037e+02, threshold=4.068e+02, percent-clipped=2.0 +2023-04-26 21:59:19,770 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=46052.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 21:59:23,444 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46058.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 21:59:32,393 INFO [finetune.py:976] (0/7) Epoch 9, batch 250, loss[loss=0.1546, simple_loss=0.232, pruned_loss=0.03855, over 4739.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2586, pruned_loss=0.0657, over 683305.15 frames. ], batch size: 54, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 21:59:48,907 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.80 vs. limit=5.0 +2023-04-26 21:59:51,842 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-26 21:59:53,134 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-26 21:59:59,749 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-26 22:00:04,657 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-26 22:00:05,474 INFO [finetune.py:976] (0/7) Epoch 9, batch 300, loss[loss=0.1968, simple_loss=0.2638, pruned_loss=0.06496, over 4804.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2627, pruned_loss=0.06596, over 744904.79 frames. 
], batch size: 41, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 22:00:10,883 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46130.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 22:00:12,630 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.245e+02 1.689e+02 1.934e+02 2.285e+02 6.162e+02, threshold=3.867e+02, percent-clipped=1.0 +2023-04-26 22:00:27,922 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0373, 1.4626, 1.3469, 1.7542, 1.5337, 1.8407, 1.2473, 3.6720], + device='cuda:0'), covar=tensor([0.0841, 0.1131, 0.1067, 0.1419, 0.0885, 0.0694, 0.1046, 0.0198], + device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0059], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-26 22:00:30,407 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5427, 1.7257, 1.3928, 1.0698, 1.1989, 1.2161, 1.4025, 1.1382], + device='cuda:0'), covar=tensor([0.1893, 0.1548, 0.1661, 0.2028, 0.2613, 0.2054, 0.1146, 0.2185], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0216, 0.0171, 0.0204, 0.0205, 0.0185, 0.0162, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 22:00:42,789 INFO [finetune.py:976] (0/7) Epoch 9, batch 350, loss[loss=0.3063, simple_loss=0.348, pruned_loss=0.1322, over 4926.00 frames. ], tot_loss[loss=0.2002, simple_loss=0.2654, pruned_loss=0.06748, over 790618.35 frames. ], batch size: 42, lr: 3.81e-03, grad_scale: 32.0 +2023-04-26 22:01:38,761 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46215.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 22:01:48,011 INFO [finetune.py:976] (0/7) Epoch 9, batch 400, loss[loss=0.1915, simple_loss=0.2558, pruned_loss=0.06361, over 4828.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.2656, pruned_loss=0.06729, over 826066.92 frames. ], batch size: 49, lr: 3.80e-03, grad_scale: 32.0 +2023-04-26 22:02:01,095 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.830e+02 2.182e+02 2.472e+02 8.213e+02, threshold=4.364e+02, percent-clipped=3.0 +2023-04-26 22:02:26,905 INFO [finetune.py:976] (0/7) Epoch 9, batch 450, loss[loss=0.1349, simple_loss=0.2059, pruned_loss=0.03196, over 4929.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2631, pruned_loss=0.0659, over 854243.77 frames. ], batch size: 33, lr: 3.80e-03, grad_scale: 32.0 +2023-04-26 22:02:29,456 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46276.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 22:03:00,647 INFO [finetune.py:976] (0/7) Epoch 9, batch 500, loss[loss=0.1503, simple_loss=0.2152, pruned_loss=0.04274, over 4748.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2602, pruned_loss=0.0648, over 878102.10 frames. ], batch size: 26, lr: 3.80e-03, grad_scale: 32.0 +2023-04-26 22:03:07,759 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.712e+02 1.942e+02 2.347e+02 4.298e+02, threshold=3.884e+02, percent-clipped=0.0 +2023-04-26 22:03:10,761 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46337.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 22:03:16,035 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-26 22:03:25,993 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46358.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 22:03:26,687 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-26 22:03:34,346 INFO [finetune.py:976] (0/7) Epoch 9, batch 550, loss[loss=0.2343, simple_loss=0.293, pruned_loss=0.0878, over 4812.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2579, pruned_loss=0.06444, over 895768.26 frames. ], batch size: 40, lr: 3.80e-03, grad_scale: 32.0 +2023-04-26 22:03:57,934 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=46406.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 22:04:01,695 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4845, 1.2809, 1.2260, 1.2779, 1.7826, 1.3974, 1.1274, 1.1802], + device='cuda:0'), covar=tensor([0.1683, 0.1151, 0.1695, 0.1271, 0.0628, 0.1335, 0.1600, 0.1826], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0322, 0.0351, 0.0299, 0.0338, 0.0322, 0.0307, 0.0352], + device='cuda:0'), out_proj_covar=tensor([6.5362e-05, 6.8273e-05, 7.5811e-05, 6.1986e-05, 7.1194e-05, 6.9239e-05, + 6.6182e-05, 7.5651e-05], device='cuda:0') +2023-04-26 22:04:07,673 INFO [finetune.py:976] (0/7) Epoch 9, batch 600, loss[loss=0.2635, simple_loss=0.3279, pruned_loss=0.0995, over 4806.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2599, pruned_loss=0.06584, over 908192.17 frames. ], batch size: 45, lr: 3.80e-03, grad_scale: 32.0 +2023-04-26 22:04:12,571 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46430.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 22:04:14,306 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.294e+02 1.692e+02 1.978e+02 2.446e+02 4.653e+02, threshold=3.955e+02, percent-clipped=2.0 +2023-04-26 22:04:21,584 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4517, 1.6033, 1.7277, 1.8720, 1.7659, 1.9811, 1.8164, 1.7733], + device='cuda:0'), covar=tensor([0.4988, 0.7574, 0.6599, 0.6221, 0.7422, 0.9733, 0.7698, 0.7042], + device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0382, 0.0315, 0.0326, 0.0341, 0.0402, 0.0363, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 22:04:40,995 INFO [finetune.py:976] (0/7) Epoch 9, batch 650, loss[loss=0.2384, simple_loss=0.2847, pruned_loss=0.09605, over 4911.00 frames. ], tot_loss[loss=0.1986, simple_loss=0.2629, pruned_loss=0.06717, over 919855.27 frames. ], batch size: 37, lr: 3.80e-03, grad_scale: 32.0 +2023-04-26 22:04:44,729 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=46478.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 22:04:55,586 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-26 22:05:10,682 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46515.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 22:05:14,901 INFO [finetune.py:976] (0/7) Epoch 9, batch 700, loss[loss=0.258, simple_loss=0.3054, pruned_loss=0.1053, over 4922.00 frames. ], tot_loss[loss=0.2, simple_loss=0.2649, pruned_loss=0.0675, over 928560.46 frames. 
], batch size: 38, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:05:21,598 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.297e+02 1.714e+02 2.090e+02 2.739e+02 4.841e+02, threshold=4.179e+02, percent-clipped=4.0
+2023-04-26 22:05:30,595 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3344, 1.1849, 1.5975, 1.4596, 1.2368, 1.0447, 1.2528, 0.8180],
+ device='cuda:0'), covar=tensor([0.0605, 0.0642, 0.0425, 0.0631, 0.0632, 0.1240, 0.0530, 0.0720],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0074, 0.0072, 0.0067, 0.0076, 0.0096, 0.0078, 0.0074],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 22:05:43,194 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=46563.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:05:53,548 INFO [finetune.py:976] (0/7) Epoch 9, batch 750, loss[loss=0.1774, simple_loss=0.2482, pruned_loss=0.05328, over 4813.00 frames. ], tot_loss[loss=0.2001, simple_loss=0.266, pruned_loss=0.06707, over 935887.34 frames. ], batch size: 33, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:06:18,450 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46595.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:06:28,385 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46602.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:06:58,618 INFO [finetune.py:976] (0/7) Epoch 9, batch 800, loss[loss=0.1449, simple_loss=0.1969, pruned_loss=0.04648, over 4285.00 frames. ], tot_loss[loss=0.2003, simple_loss=0.2661, pruned_loss=0.06723, over 940233.69 frames. ], batch size: 18, lr: 3.80e-03, grad_scale: 64.0
+2023-04-26 22:07:09,947 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46632.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:07:10,488 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.620e+02 1.991e+02 2.380e+02 4.805e+02, threshold=3.982e+02, percent-clipped=1.0
+2023-04-26 22:07:26,181 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46656.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 22:07:27,343 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0
+2023-04-26 22:07:31,879 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46663.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:07:37,206 INFO [finetune.py:976] (0/7) Epoch 9, batch 850, loss[loss=0.214, simple_loss=0.2766, pruned_loss=0.0757, over 4819.00 frames. ], tot_loss[loss=0.1971, simple_loss=0.2626, pruned_loss=0.06583, over 943041.83 frames. ], batch size: 41, lr: 3.80e-03, grad_scale: 64.0
+2023-04-26 22:08:10,359 INFO [finetune.py:976] (0/7) Epoch 9, batch 900, loss[loss=0.1957, simple_loss=0.2485, pruned_loss=0.07144, over 4850.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2588, pruned_loss=0.06423, over 947284.27 frames. ], batch size: 49, lr: 3.80e-03, grad_scale: 64.0
+2023-04-26 22:08:17,022 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.197e+02 1.643e+02 2.086e+02 2.450e+02 5.487e+02, threshold=4.172e+02, percent-clipped=3.0
+2023-04-26 22:08:43,452 INFO [finetune.py:976] (0/7) Epoch 9, batch 950, loss[loss=0.2538, simple_loss=0.3091, pruned_loss=0.09925, over 4713.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2581, pruned_loss=0.06415, over 950329.11 frames. ], batch size: 59, lr: 3.80e-03, grad_scale: 64.0
+2023-04-26 22:09:07,673 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=46810.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:09:16,375 INFO [finetune.py:976] (0/7) Epoch 9, batch 1000, loss[loss=0.2041, simple_loss=0.2746, pruned_loss=0.06683, over 4895.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2599, pruned_loss=0.06474, over 953161.70 frames. ], batch size: 35, lr: 3.80e-03, grad_scale: 64.0
+2023-04-26 22:09:22,995 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.599e+02 1.946e+02 2.521e+02 4.625e+02, threshold=3.893e+02, percent-clipped=2.0
+2023-04-26 22:09:49,148 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=46871.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:09:49,626 INFO [finetune.py:976] (0/7) Epoch 9, batch 1050, loss[loss=0.1887, simple_loss=0.2618, pruned_loss=0.05774, over 4869.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.2613, pruned_loss=0.06475, over 953695.06 frames. ], batch size: 34, lr: 3.80e-03, grad_scale: 64.0
+2023-04-26 22:10:22,798 INFO [finetune.py:976] (0/7) Epoch 9, batch 1100, loss[loss=0.2156, simple_loss=0.2824, pruned_loss=0.07445, over 4914.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.2628, pruned_loss=0.06541, over 953982.80 frames. ], batch size: 38, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:10:26,395 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.8561, 4.7166, 3.3155, 5.4969, 4.8782, 4.8209, 2.5428, 4.7955],
+ device='cuda:0'), covar=tensor([0.1444, 0.0890, 0.2964, 0.0921, 0.2570, 0.1549, 0.4996, 0.2183],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0218, 0.0252, 0.0307, 0.0301, 0.0252, 0.0271, 0.0273],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 22:10:28,834 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3591, 3.2985, 2.4489, 3.8751, 3.3350, 3.3672, 1.4514, 3.3007],
+ device='cuda:0'), covar=tensor([0.1989, 0.1479, 0.3479, 0.2261, 0.3116, 0.1971, 0.5784, 0.2735],
+ device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0217, 0.0251, 0.0307, 0.0301, 0.0252, 0.0270, 0.0272],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 22:10:29,446 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=46932.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:10:30,585 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.585e+02 1.927e+02 2.625e+02 4.611e+02, threshold=3.853e+02, percent-clipped=4.0
+2023-04-26 22:10:40,917 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46951.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 22:10:45,221 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=46958.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:10:56,081 INFO [finetune.py:976] (0/7) Epoch 9, batch 1150, loss[loss=0.1864, simple_loss=0.2574, pruned_loss=0.05769, over 4780.00 frames. ], tot_loss[loss=0.199, simple_loss=0.2651, pruned_loss=0.06643, over 953539.06 frames. ], batch size: 51, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:11:01,453 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=46980.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:11:43,960 INFO [finetune.py:976] (0/7) Epoch 9, batch 1200, loss[loss=0.1916, simple_loss=0.2622, pruned_loss=0.0605, over 4789.00 frames. ], tot_loss[loss=0.1968, simple_loss=0.263, pruned_loss=0.06528, over 954118.82 frames. ], batch size: 51, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:11:50,035 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4285, 1.3401, 1.7256, 1.7102, 1.3851, 1.1171, 1.4582, 1.0454],
+ device='cuda:0'), covar=tensor([0.0786, 0.0809, 0.0501, 0.0753, 0.0766, 0.1281, 0.0663, 0.0760],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0074, 0.0072, 0.0067, 0.0076, 0.0096, 0.0079, 0.0075],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 22:12:03,720 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.560e+02 1.899e+02 2.282e+02 5.310e+02, threshold=3.798e+02, percent-clipped=1.0
+2023-04-26 22:12:14,430 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1792, 2.2576, 2.4765, 2.6829, 2.6808, 2.1606, 1.8689, 2.2850],
+ device='cuda:0'), covar=tensor([0.1103, 0.1029, 0.0618, 0.0737, 0.0726, 0.0993, 0.0968, 0.0676],
+ device='cuda:0'), in_proj_covar=tensor([0.0201, 0.0207, 0.0184, 0.0179, 0.0181, 0.0194, 0.0163, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 22:12:55,841 INFO [finetune.py:976] (0/7) Epoch 9, batch 1250, loss[loss=0.2272, simple_loss=0.2859, pruned_loss=0.08419, over 4858.00 frames. ], tot_loss[loss=0.1951, simple_loss=0.2604, pruned_loss=0.06485, over 955294.73 frames. ], batch size: 49, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:14:00,075 INFO [finetune.py:976] (0/7) Epoch 9, batch 1300, loss[loss=0.168, simple_loss=0.2345, pruned_loss=0.05078, over 4915.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2569, pruned_loss=0.06375, over 955743.20 frames. ], batch size: 37, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:14:15,264 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.673e+02 1.914e+02 2.315e+02 4.014e+02, threshold=3.828e+02, percent-clipped=2.0
+2023-04-26 22:14:58,575 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47166.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:15:07,950 INFO [finetune.py:976] (0/7) Epoch 9, batch 1350, loss[loss=0.2092, simple_loss=0.2854, pruned_loss=0.06649, over 4702.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2587, pruned_loss=0.06484, over 955450.30 frames. ], batch size: 59, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:15:58,006 INFO [finetune.py:976] (0/7) Epoch 9, batch 1400, loss[loss=0.193, simple_loss=0.2568, pruned_loss=0.06463, over 4765.00 frames. ], tot_loss[loss=0.1975, simple_loss=0.2614, pruned_loss=0.06678, over 953504.61 frames. ], batch size: 26, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:16:06,778 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.248e+02 1.819e+02 2.122e+02 2.454e+02 4.582e+02, threshold=4.244e+02, percent-clipped=5.0
+2023-04-26 22:16:08,984 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0
+2023-04-26 22:16:11,713 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47241.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:16:17,764 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47251.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:16:22,002 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47258.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:16:30,889 INFO [finetune.py:976] (0/7) Epoch 9, batch 1450, loss[loss=0.1801, simple_loss=0.2513, pruned_loss=0.05446, over 4751.00 frames. ], tot_loss[loss=0.1994, simple_loss=0.2641, pruned_loss=0.06735, over 954892.99 frames. ], batch size: 26, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:16:34,646 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7807, 2.1476, 1.7446, 1.4385, 1.2827, 1.3216, 1.8091, 1.2678],
+ device='cuda:0'), covar=tensor([0.1711, 0.1538, 0.1604, 0.2004, 0.2525, 0.2069, 0.1145, 0.2106],
+ device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0215, 0.0170, 0.0203, 0.0204, 0.0183, 0.0161, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-26 22:16:50,046 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=47299.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:16:51,884 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47302.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:16:53,685 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5546, 2.5283, 2.0135, 2.9381, 2.5574, 2.5828, 1.1319, 2.4937],
+ device='cuda:0'), covar=tensor([0.1939, 0.1610, 0.2744, 0.2194, 0.3056, 0.1948, 0.4961, 0.2448],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0217, 0.0251, 0.0306, 0.0300, 0.0252, 0.0271, 0.0271],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 22:16:54,267 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=47306.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:17:03,913 INFO [finetune.py:976] (0/7) Epoch 9, batch 1500, loss[loss=0.2041, simple_loss=0.2723, pruned_loss=0.06798, over 4886.00 frames. ], tot_loss[loss=0.1993, simple_loss=0.2649, pruned_loss=0.06692, over 954386.67 frames. ], batch size: 35, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:17:12,631 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.650e+02 1.825e+02 2.290e+02 4.290e+02, threshold=3.651e+02, percent-clipped=1.0
+2023-04-26 22:17:53,991 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9068, 3.8055, 2.7759, 4.5633, 4.0591, 3.9160, 1.7496, 3.9058],
+ device='cuda:0'), covar=tensor([0.1992, 0.1341, 0.3377, 0.1587, 0.2768, 0.2100, 0.6185, 0.2645],
+ device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0217, 0.0252, 0.0307, 0.0301, 0.0252, 0.0271, 0.0273],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 22:18:03,332 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7412, 1.9866, 1.1855, 1.4315, 2.2853, 1.6069, 1.5700, 1.6411],
+ device='cuda:0'), covar=tensor([0.0525, 0.0372, 0.0338, 0.0611, 0.0253, 0.0572, 0.0551, 0.0605],
+ device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-26 22:18:03,818 INFO [finetune.py:976] (0/7) Epoch 9, batch 1550, loss[loss=0.1398, simple_loss=0.2116, pruned_loss=0.03396, over 4790.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2641, pruned_loss=0.06611, over 954018.86 frames. ], batch size: 29, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:18:13,645 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47379.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:18:49,434 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9810, 1.8625, 2.0997, 2.3611, 2.4255, 1.8940, 1.5160, 2.0287],
+ device='cuda:0'), covar=tensor([0.0934, 0.1027, 0.0622, 0.0589, 0.0555, 0.0871, 0.0887, 0.0633],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0205, 0.0182, 0.0177, 0.0178, 0.0190, 0.0161, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 22:19:10,271 INFO [finetune.py:976] (0/7) Epoch 9, batch 1600, loss[loss=0.1925, simple_loss=0.2455, pruned_loss=0.06977, over 4696.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2607, pruned_loss=0.06497, over 954611.05 frames. ], batch size: 23, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:19:23,851 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.649e+02 1.912e+02 2.310e+02 4.649e+02, threshold=3.824e+02, percent-clipped=3.0
+2023-04-26 22:19:34,490 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47440.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:19:46,072 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8281, 1.0067, 1.4695, 1.6254, 1.6061, 1.7413, 1.4819, 1.4739],
+ device='cuda:0'), covar=tensor([0.4957, 0.6835, 0.5608, 0.5037, 0.6246, 0.9104, 0.6238, 0.5776],
+ device='cuda:0'), in_proj_covar=tensor([0.0325, 0.0386, 0.0319, 0.0328, 0.0344, 0.0406, 0.0366, 0.0330],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 22:20:08,738 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47466.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:20:17,937 INFO [finetune.py:976] (0/7) Epoch 9, batch 1650, loss[loss=0.1839, simple_loss=0.2459, pruned_loss=0.06099, over 4921.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.258, pruned_loss=0.06394, over 954988.93 frames. ], batch size: 37, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:20:46,545 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0
+2023-04-26 22:21:00,455 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=47514.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:21:05,269 INFO [finetune.py:976] (0/7) Epoch 9, batch 1700, loss[loss=0.1388, simple_loss=0.2195, pruned_loss=0.029, over 4791.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2566, pruned_loss=0.06372, over 954521.00 frames. ], batch size: 29, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:21:12,553 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.638e+02 2.054e+02 2.588e+02 3.948e+02, threshold=4.108e+02, percent-clipped=3.0
+2023-04-26 22:21:14,444 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9879, 1.4674, 1.6284, 1.6933, 2.1347, 1.8527, 1.4117, 1.5327],
+ device='cuda:0'), covar=tensor([0.1418, 0.1421, 0.1643, 0.1091, 0.0734, 0.1160, 0.2050, 0.1844],
+ device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0328, 0.0358, 0.0303, 0.0340, 0.0328, 0.0311, 0.0358],
+ device='cuda:0'), out_proj_covar=tensor([6.6300e-05, 6.9546e-05, 7.7312e-05, 6.2706e-05, 7.1399e-05, 7.0452e-05,
+ 6.6853e-05, 7.6904e-05], device='cuda:0')
+2023-04-26 22:21:38,485 INFO [finetune.py:976] (0/7) Epoch 9, batch 1750, loss[loss=0.2587, simple_loss=0.3282, pruned_loss=0.09461, over 4843.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2581, pruned_loss=0.06447, over 954632.27 frames. ], batch size: 49, lr: 3.80e-03, grad_scale: 32.0
+2023-04-26 22:21:50,030 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0797, 2.4874, 1.0256, 1.3355, 1.8128, 1.2350, 3.4589, 1.8949],
+ device='cuda:0'), covar=tensor([0.0678, 0.0746, 0.0798, 0.1354, 0.0577, 0.1055, 0.0228, 0.0610],
+ device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0050, 0.0047, 0.0052, 0.0053, 0.0079, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-26 22:21:54,722 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47597.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:22:11,819 INFO [finetune.py:976] (0/7) Epoch 9, batch 1800, loss[loss=0.2042, simple_loss=0.2671, pruned_loss=0.07064, over 4793.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2602, pruned_loss=0.06447, over 953833.84 frames. ], batch size: 51, lr: 3.79e-03, grad_scale: 32.0
+2023-04-26 22:22:16,222 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0726, 1.6063, 1.4089, 1.6907, 1.6280, 1.8839, 1.3383, 3.5012],
+ device='cuda:0'), covar=tensor([0.0659, 0.0751, 0.0777, 0.1199, 0.0634, 0.0577, 0.0766, 0.0144],
+ device='cuda:0'), in_proj_covar=tensor([0.0039, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0059],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-26 22:22:19,165 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.815e+02 2.187e+02 2.572e+02 4.070e+02, threshold=4.375e+02, percent-clipped=0.0
+2023-04-26 22:22:37,656 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-26 22:22:45,313 INFO [finetune.py:976] (0/7) Epoch 9, batch 1850, loss[loss=0.2267, simple_loss=0.2876, pruned_loss=0.08293, over 4795.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2619, pruned_loss=0.06507, over 953855.44 frames. ], batch size: 51, lr: 3.79e-03, grad_scale: 32.0
+2023-04-26 22:22:49,151 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4285, 1.4763, 1.4887, 2.1774, 2.2304, 1.8798, 1.8559, 1.5872],
+ device='cuda:0'), covar=tensor([0.1900, 0.2368, 0.2624, 0.1804, 0.1619, 0.2186, 0.2877, 0.2633],
+ device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0328, 0.0358, 0.0303, 0.0341, 0.0329, 0.0312, 0.0359],
+ device='cuda:0'), out_proj_covar=tensor([6.6486e-05, 6.9716e-05, 7.7308e-05, 6.2727e-05, 7.1550e-05, 7.0739e-05,
+ 6.7069e-05, 7.7107e-05], device='cuda:0')
+2023-04-26 22:23:44,012 INFO [finetune.py:976] (0/7) Epoch 9, batch 1900, loss[loss=0.2624, simple_loss=0.3277, pruned_loss=0.09857, over 4890.00 frames. ], tot_loss[loss=0.1974, simple_loss=0.2635, pruned_loss=0.0656, over 952749.23 frames. ], batch size: 43, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:24:02,107 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.223e+02 1.717e+02 1.936e+02 2.333e+02 4.969e+02, threshold=3.871e+02, percent-clipped=1.0
+2023-04-26 22:24:02,193 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=47735.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:24:04,083 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47738.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:24:33,490 INFO [finetune.py:976] (0/7) Epoch 9, batch 1950, loss[loss=0.1797, simple_loss=0.2549, pruned_loss=0.05222, over 4802.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2623, pruned_loss=0.06468, over 954755.02 frames. ], batch size: 41, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:24:43,276 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47788.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:24:50,424 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47799.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:24:57,024 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=47809.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 22:25:10,594 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1824, 1.5825, 2.0418, 2.3636, 1.9868, 1.6019, 1.1430, 1.7570],
+ device='cuda:0'), covar=tensor([0.3618, 0.3942, 0.1929, 0.2598, 0.2877, 0.2951, 0.4904, 0.2664],
+ device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0253, 0.0221, 0.0321, 0.0215, 0.0230, 0.0237, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-26 22:25:11,663 INFO [finetune.py:976] (0/7) Epoch 9, batch 2000, loss[loss=0.198, simple_loss=0.2483, pruned_loss=0.07386, over 4849.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2583, pruned_loss=0.06357, over 953800.01 frames. ], batch size: 44, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:25:30,748 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.686e+02 2.019e+02 2.338e+02 4.477e+02, threshold=4.037e+02, percent-clipped=4.0
+2023-04-26 22:25:44,887 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47849.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:26:06,340 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0
+2023-04-26 22:26:17,408 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=47870.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 22:26:24,631 INFO [finetune.py:976] (0/7) Epoch 9, batch 2050, loss[loss=0.1796, simple_loss=0.2455, pruned_loss=0.05688, over 4755.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2556, pruned_loss=0.06288, over 954931.87 frames. ], batch size: 28, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:26:52,054 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=47897.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:27:12,921 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3605, 1.7355, 2.1863, 2.9269, 2.2388, 1.7496, 1.6095, 2.1170],
+ device='cuda:0'), covar=tensor([0.3872, 0.4011, 0.1926, 0.2820, 0.3255, 0.3041, 0.4682, 0.2868],
+ device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0252, 0.0221, 0.0321, 0.0214, 0.0229, 0.0236, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-26 22:27:30,785 INFO [finetune.py:976] (0/7) Epoch 9, batch 2100, loss[loss=0.204, simple_loss=0.2756, pruned_loss=0.06625, over 4909.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2554, pruned_loss=0.06297, over 954310.73 frames. ], batch size: 37, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:27:39,268 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.719e+02 2.114e+02 2.573e+02 5.213e+02, threshold=4.228e+02, percent-clipped=2.0
+2023-04-26 22:27:42,596 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0
+2023-04-26 22:27:43,694 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5033, 0.9521, 1.2299, 1.2162, 1.6523, 1.3513, 1.0928, 1.1578],
+ device='cuda:0'), covar=tensor([0.1554, 0.1518, 0.1875, 0.1313, 0.0950, 0.1312, 0.1940, 0.1975],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0324, 0.0352, 0.0298, 0.0337, 0.0325, 0.0306, 0.0354],
+ device='cuda:0'), out_proj_covar=tensor([6.5429e-05, 6.8875e-05, 7.6104e-05, 6.1639e-05, 7.0678e-05, 6.9867e-05,
+ 6.5901e-05, 7.5887e-05], device='cuda:0')
+2023-04-26 22:27:45,500 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=47945.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:28:20,274 INFO [finetune.py:976] (0/7) Epoch 9, batch 2150, loss[loss=0.3047, simple_loss=0.3435, pruned_loss=0.133, over 4909.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2589, pruned_loss=0.06414, over 954577.87 frames. ], batch size: 35, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:28:43,646 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-48000.pt
+2023-04-26 22:29:17,268 INFO [finetune.py:976] (0/7) Epoch 9, batch 2200, loss[loss=0.1927, simple_loss=0.2663, pruned_loss=0.05955, over 4815.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2614, pruned_loss=0.06459, over 952990.27 frames. ], batch size: 38, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:29:22,003 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9068, 1.4088, 1.6846, 1.6172, 1.7055, 1.3669, 0.6766, 1.3537],
+ device='cuda:0'), covar=tensor([0.3430, 0.3746, 0.1867, 0.2570, 0.2854, 0.2876, 0.4773, 0.2422],
+ device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0250, 0.0219, 0.0318, 0.0212, 0.0228, 0.0234, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-26 22:29:32,259 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.341e+02 1.685e+02 2.065e+02 2.367e+02 5.445e+02, threshold=4.130e+02, percent-clipped=2.0
+2023-04-26 22:29:32,382 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48035.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:29:42,319 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-04-26 22:30:13,115 INFO [finetune.py:976] (0/7) Epoch 9, batch 2250, loss[loss=0.2268, simple_loss=0.2846, pruned_loss=0.08455, over 4912.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2642, pruned_loss=0.06635, over 953511.06 frames. ], batch size: 36, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:30:13,238 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48072.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:30:32,855 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=48083.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:30:45,746 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48094.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:31:21,272 INFO [finetune.py:976] (0/7) Epoch 9, batch 2300, loss[loss=0.1705, simple_loss=0.2434, pruned_loss=0.04883, over 4895.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2643, pruned_loss=0.06614, over 953892.34 frames. ], batch size: 32, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:31:33,406 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. limit=5.0
+2023-04-26 22:31:35,714 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48133.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:31:36,816 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.671e+02 1.981e+02 2.243e+02 3.920e+02, threshold=3.963e+02, percent-clipped=0.0
+2023-04-26 22:31:50,252 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48144.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:32:14,434 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48165.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 22:32:16,860 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0
+2023-04-26 22:32:25,262 INFO [finetune.py:976] (0/7) Epoch 9, batch 2350, loss[loss=0.1697, simple_loss=0.2306, pruned_loss=0.05444, over 4071.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2613, pruned_loss=0.06551, over 953932.33 frames. ], batch size: 65, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:32:48,013 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8653, 1.0877, 1.4572, 1.6229, 1.5615, 1.7214, 1.5337, 1.5220],
+ device='cuda:0'), covar=tensor([0.4446, 0.6214, 0.5400, 0.5346, 0.6595, 0.9164, 0.5505, 0.5660],
+ device='cuda:0'), in_proj_covar=tensor([0.0323, 0.0383, 0.0316, 0.0325, 0.0341, 0.0404, 0.0364, 0.0326],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 22:32:56,619 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9376, 2.3705, 0.8676, 1.1755, 1.5551, 1.1411, 2.4817, 1.4044],
+ device='cuda:0'), covar=tensor([0.0680, 0.0598, 0.0698, 0.1323, 0.0491, 0.1054, 0.0340, 0.0742],
+ device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0068, 0.0050, 0.0048, 0.0052, 0.0053, 0.0079, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-26 22:33:19,204 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48212.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 22:33:21,093 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4229, 1.3489, 1.6874, 1.7260, 1.3809, 1.1372, 1.4925, 0.9841],
+ device='cuda:0'), covar=tensor([0.0697, 0.0808, 0.0582, 0.0662, 0.0826, 0.1157, 0.0907, 0.0885],
+ device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0073, 0.0072, 0.0067, 0.0076, 0.0096, 0.0078, 0.0074],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 22:33:30,439 INFO [finetune.py:976] (0/7) Epoch 9, batch 2400, loss[loss=0.161, simple_loss=0.2402, pruned_loss=0.04089, over 4907.00 frames. ], tot_loss[loss=0.1939, simple_loss=0.2582, pruned_loss=0.06477, over 955475.18 frames. ], batch size: 46, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:33:30,646 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-04-26 22:33:45,095 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.712e+02 1.962e+02 2.337e+02 6.803e+02, threshold=3.925e+02, percent-clipped=4.0
+2023-04-26 22:34:34,228 INFO [finetune.py:976] (0/7) Epoch 9, batch 2450, loss[loss=0.2129, simple_loss=0.2873, pruned_loss=0.06931, over 4808.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2573, pruned_loss=0.06481, over 954455.07 frames. ], batch size: 45, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:34:34,580 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0
+2023-04-26 22:34:34,990 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48273.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 22:35:25,352 INFO [finetune.py:976] (0/7) Epoch 9, batch 2500, loss[loss=0.2618, simple_loss=0.3276, pruned_loss=0.09799, over 4842.00 frames. ], tot_loss[loss=0.1945, simple_loss=0.2587, pruned_loss=0.06516, over 952714.94 frames. ], batch size: 47, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:35:26,624 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48324.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:35:34,245 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.734e+02 2.061e+02 2.480e+02 6.002e+02, threshold=4.122e+02, percent-clipped=6.0
+2023-04-26 22:36:14,628 INFO [finetune.py:976] (0/7) Epoch 9, batch 2550, loss[loss=0.1779, simple_loss=0.2444, pruned_loss=0.05571, over 4826.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2618, pruned_loss=0.06579, over 951862.61 frames. ], batch size: 51, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:36:34,707 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48385.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:36:47,907 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48394.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:36:55,537 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48397.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:37:28,005 INFO [finetune.py:976] (0/7) Epoch 9, batch 2600, loss[loss=0.2367, simple_loss=0.2893, pruned_loss=0.09208, over 4907.00 frames. ], tot_loss[loss=0.1976, simple_loss=0.2629, pruned_loss=0.06611, over 953104.93 frames. ], batch size: 37, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:37:28,726 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48423.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:37:31,744 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48428.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:37:41,883 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.066e+02 1.714e+02 2.123e+02 2.498e+02 4.905e+02, threshold=4.246e+02, percent-clipped=1.0
+2023-04-26 22:37:52,433 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=48442.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:37:54,160 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48444.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:38:13,982 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48458.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:38:24,203 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48465.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 22:38:34,661 INFO [finetune.py:976] (0/7) Epoch 9, batch 2650, loss[loss=0.1859, simple_loss=0.257, pruned_loss=0.05736, over 4881.00 frames. ], tot_loss[loss=0.1997, simple_loss=0.2653, pruned_loss=0.06707, over 955186.09 frames. ], batch size: 32, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:38:46,961 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48484.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:38:57,120 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=48492.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:39:21,187 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=48513.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 22:39:32,428 INFO [finetune.py:976] (0/7) Epoch 9, batch 2700, loss[loss=0.2072, simple_loss=0.2668, pruned_loss=0.07377, over 4859.00 frames. ], tot_loss[loss=0.1983, simple_loss=0.2642, pruned_loss=0.06622, over 955255.45 frames. ], batch size: 34, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:39:40,875 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.732e+02 1.990e+02 2.443e+02 3.754e+02, threshold=3.980e+02, percent-clipped=0.0
+2023-04-26 22:40:20,117 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48568.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 22:40:28,389 INFO [finetune.py:976] (0/7) Epoch 9, batch 2750, loss[loss=0.2214, simple_loss=0.2686, pruned_loss=0.08713, over 4847.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2611, pruned_loss=0.06551, over 956296.66 frames. ], batch size: 44, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:41:33,773 INFO [finetune.py:976] (0/7) Epoch 9, batch 2800, loss[loss=0.1905, simple_loss=0.2556, pruned_loss=0.06275, over 4913.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2573, pruned_loss=0.06421, over 954565.76 frames. ], batch size: 36, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:41:45,911 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.27 vs. limit=5.0
+2023-04-26 22:41:46,303 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.102e+02 1.615e+02 1.895e+02 2.284e+02 5.384e+02, threshold=3.791e+02, percent-clipped=3.0
+2023-04-26 22:41:59,193 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-04-26 22:42:17,323 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.23 vs. limit=5.0
+2023-04-26 22:42:38,717 INFO [finetune.py:976] (0/7) Epoch 9, batch 2850, loss[loss=0.2205, simple_loss=0.2899, pruned_loss=0.07557, over 4800.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.2562, pruned_loss=0.06381, over 954075.98 frames. ], batch size: 51, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:42:49,416 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48680.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:43:11,705 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6401, 1.7303, 1.0589, 1.3675, 2.0640, 1.5420, 1.4471, 1.4660],
+ device='cuda:0'), covar=tensor([0.0500, 0.0368, 0.0347, 0.0542, 0.0273, 0.0522, 0.0486, 0.0586],
+ device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0030, 0.0029, 0.0030],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-26 22:43:13,535 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4564, 3.3934, 0.8896, 1.8972, 1.8750, 2.5350, 1.8968, 0.9824],
+ device='cuda:0'), covar=tensor([0.1390, 0.0936, 0.2002, 0.1279, 0.1071, 0.1016, 0.1514, 0.2067],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0250, 0.0141, 0.0122, 0.0135, 0.0154, 0.0118, 0.0120],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 22:43:44,377 INFO [finetune.py:976] (0/7) Epoch 9, batch 2900, loss[loss=0.2854, simple_loss=0.3289, pruned_loss=0.121, over 4161.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.26, pruned_loss=0.06487, over 954013.02 frames. ], batch size: 65, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:43:48,137 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48728.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:43:52,844 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.707e+02 2.011e+02 2.462e+02 4.094e+02, threshold=4.022e+02, percent-clipped=3.0
+2023-04-26 22:44:04,262 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48753.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:44:14,389 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-04-26 22:44:17,828 INFO [finetune.py:976] (0/7) Epoch 9, batch 2950, loss[loss=0.1741, simple_loss=0.2489, pruned_loss=0.0497, over 4903.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2619, pruned_loss=0.06543, over 952520.16 frames. ], batch size: 37, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:44:20,346 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=48776.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:44:22,218 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=48779.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:45:10,534 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48821.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:45:11,036 INFO [finetune.py:976] (0/7) Epoch 9, batch 3000, loss[loss=0.2086, simple_loss=0.2627, pruned_loss=0.0772, over 4168.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2623, pruned_loss=0.06571, over 953662.13 frames. ], batch size: 18, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:45:11,038 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-26 22:45:27,359 INFO [finetune.py:1010] (0/7) Epoch 9, validation: loss=0.1543, simple_loss=0.2267, pruned_loss=0.04097, over 2265189.00 frames.
+2023-04-26 22:45:27,360 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB
+2023-04-26 22:45:46,032 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.720e+02 1.968e+02 2.331e+02 3.766e+02, threshold=3.936e+02, percent-clipped=0.0
+2023-04-26 22:45:47,987 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=48838.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:46:21,171 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0
+2023-04-26 22:46:30,381 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48868.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 22:46:32,696 INFO [finetune.py:976] (0/7) Epoch 9, batch 3050, loss[loss=0.1423, simple_loss=0.213, pruned_loss=0.03577, over 4748.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2632, pruned_loss=0.06571, over 954484.74 frames. ], batch size: 27, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:46:51,100 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48882.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:47:12,118 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=48899.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:47:28,835 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=48916.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 22:47:32,442 INFO [finetune.py:976] (0/7) Epoch 9, batch 3100, loss[loss=0.1772, simple_loss=0.2412, pruned_loss=0.05664, over 4745.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2618, pruned_loss=0.06549, over 956076.75 frames. ], batch size: 27, lr: 3.79e-03, grad_scale: 16.0
+2023-04-26 22:47:42,242 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.703e+02 2.023e+02 2.523e+02 5.306e+02, threshold=4.046e+02, percent-clipped=4.0
+2023-04-26 22:48:00,616 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0
+2023-04-26 22:48:04,662 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.42 vs. limit=5.0
+2023-04-26 22:48:05,673 INFO [finetune.py:976] (0/7) Epoch 9, batch 3150, loss[loss=0.1928, simple_loss=0.2596, pruned_loss=0.06297, over 4827.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2581, pruned_loss=0.06417, over 956496.12 frames. ], batch size: 33, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:48:11,543 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=48980.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:48:38,602 INFO [finetune.py:976] (0/7) Epoch 9, batch 3200, loss[loss=0.1823, simple_loss=0.2507, pruned_loss=0.05698, over 4907.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2546, pruned_loss=0.06298, over 957080.09 frames. ], batch size: 35, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:48:42,795 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=49028.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:48:43,041 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-26 22:48:47,937 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.546e+02 1.866e+02 2.430e+02 3.518e+02, threshold=3.733e+02, percent-clipped=0.0
+2023-04-26 22:48:59,904 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49053.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:49:12,011 INFO [finetune.py:976] (0/7) Epoch 9, batch 3250, loss[loss=0.1616, simple_loss=0.2318, pruned_loss=0.04564, over 4727.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2554, pruned_loss=0.06315, over 956506.23 frames. ], batch size: 23, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:49:16,901 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49079.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:49:32,075 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=49101.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:49:45,283 INFO [finetune.py:976] (0/7) Epoch 9, batch 3300, loss[loss=0.2545, simple_loss=0.3046, pruned_loss=0.1022, over 4792.00 frames. ], tot_loss[loss=0.195, simple_loss=0.26, pruned_loss=0.065, over 957007.19 frames. ], batch size: 51, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:49:48,901 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=49127.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:49:54,142 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.776e+02 1.982e+02 2.505e+02 4.368e+02, threshold=3.963e+02, percent-clipped=2.0
+2023-04-26 22:50:18,718 INFO [finetune.py:976] (0/7) Epoch 9, batch 3350, loss[loss=0.2343, simple_loss=0.3001, pruned_loss=0.08427, over 4755.00 frames. ], tot_loss[loss=0.1972, simple_loss=0.2623, pruned_loss=0.06605, over 953112.62 frames. ], batch size: 54, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:50:20,192 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-04-26 22:50:21,869 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49177.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:50:38,269 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0
+2023-04-26 22:50:38,865 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6527, 1.2278, 1.7135, 2.1298, 1.7671, 1.6252, 1.6684, 1.7278],
+ device='cuda:0'), covar=tensor([0.6240, 0.8960, 0.8857, 0.8851, 0.7673, 1.0404, 1.0632, 0.9564],
+ device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0421, 0.0502, 0.0523, 0.0438, 0.0457, 0.0468, 0.0464],
+ device='cuda:0'), out_proj_covar=tensor([9.9418e-05, 1.0417e-04, 1.1343e-04, 1.2404e-04, 1.0618e-04, 1.1043e-04,
+ 1.1241e-04, 1.1237e-04], device='cuda:0')
+2023-04-26 22:50:46,358 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49194.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:51:00,027 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.5886, 4.4576, 3.1263, 5.2801, 4.5904, 4.5567, 2.1629, 4.5121],
+ device='cuda:0'), covar=tensor([0.1563, 0.0950, 0.2781, 0.0875, 0.2107, 0.1598, 0.5218, 0.1927],
+ device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0217, 0.0251, 0.0306, 0.0302, 0.0252, 0.0271, 0.0272],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 22:51:19,515 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6466, 1.6151, 0.8925, 1.3201, 1.9491, 1.5220, 1.3937, 1.3963],
+ device='cuda:0'), covar=tensor([0.0502, 0.0397, 0.0370, 0.0555, 0.0274, 0.0508, 0.0502, 0.0586],
+ device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0024, 0.0030, 0.0021, 0.0029, 0.0029, 0.0030],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-26 22:51:20,617 INFO [finetune.py:976] (0/7) Epoch 9, batch 3400, loss[loss=0.2028, simple_loss=0.2755, pruned_loss=0.065, over 4895.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2618, pruned_loss=0.06544, over 952776.52 frames. ], batch size: 43, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:51:31,133 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0
+2023-04-26 22:51:38,491 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.560e+02 1.880e+02 2.304e+02 5.617e+02, threshold=3.759e+02, percent-clipped=2.0
+2023-04-26 22:51:39,201 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49236.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:52:13,321 INFO [finetune.py:976] (0/7) Epoch 9, batch 3450, loss[loss=0.2088, simple_loss=0.2644, pruned_loss=0.07655, over 4825.00 frames. ], tot_loss[loss=0.1958, simple_loss=0.2615, pruned_loss=0.065, over 951754.31 frames. ], batch size: 30, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:52:15,319 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0279, 1.7564, 2.0200, 2.3740, 2.3336, 1.7816, 1.4913, 1.9578],
+ device='cuda:0'), covar=tensor([0.0910, 0.1161, 0.0747, 0.0574, 0.0581, 0.0956, 0.0928, 0.0678],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0205, 0.0182, 0.0177, 0.0179, 0.0189, 0.0160, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 22:52:30,776 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49297.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:53:02,588 INFO [finetune.py:976] (0/7) Epoch 9, batch 3500, loss[loss=0.2091, simple_loss=0.2698, pruned_loss=0.07422, over 4821.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.2584, pruned_loss=0.06406, over 952184.53 frames. ], batch size: 38, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:53:15,830 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.247e+02 1.587e+02 1.893e+02 2.359e+02 3.521e+02, threshold=3.785e+02, percent-clipped=0.0
+2023-04-26 22:53:27,410 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-04-26 22:54:01,000 INFO [finetune.py:976] (0/7) Epoch 9, batch 3550, loss[loss=0.2333, simple_loss=0.2923, pruned_loss=0.08719, over 4939.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.256, pruned_loss=0.06363, over 953163.40 frames. ], batch size: 33, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:55:07,553 INFO [finetune.py:976] (0/7) Epoch 9, batch 3600, loss[loss=0.1795, simple_loss=0.2417, pruned_loss=0.05863, over 4762.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2535, pruned_loss=0.06217, over 955000.83 frames. ], batch size: 28, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:55:07,647 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49422.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:55:17,831 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8261, 2.0466, 1.9111, 2.1057, 1.8722, 2.0754, 2.0557, 1.9838],
+ device='cuda:0'), covar=tensor([0.5047, 0.8141, 0.6915, 0.5813, 0.7261, 0.9610, 0.8255, 0.7448],
+ device='cuda:0'), in_proj_covar=tensor([0.0321, 0.0383, 0.0314, 0.0324, 0.0340, 0.0401, 0.0361, 0.0323],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 22:55:24,181 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49432.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 22:55:25,884 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.649e+02 1.885e+02 2.480e+02 5.265e+02, threshold=3.771e+02, percent-clipped=3.0
+2023-04-26 22:56:18,973 INFO [finetune.py:976] (0/7) Epoch 9, batch 3650, loss[loss=0.1709, simple_loss=0.2402, pruned_loss=0.05082, over 4725.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.255, pruned_loss=0.06272, over 955208.94 frames. ], batch size: 23, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:56:22,182 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49477.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:56:30,062 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49483.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:56:35,221 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0
+2023-04-26 22:56:36,683 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49493.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 22:56:37,274 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49494.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:56:54,469 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0
+2023-04-26 22:56:56,124 INFO [finetune.py:976] (0/7) Epoch 9, batch 3700, loss[loss=0.2341, simple_loss=0.2893, pruned_loss=0.0894, over 4153.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2601, pruned_loss=0.06483, over 951755.43 frames. ], batch size: 65, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:56:58,021 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=49525.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:57:09,402 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.074e+02 1.745e+02 1.976e+02 2.490e+02 4.358e+02, threshold=3.952e+02, percent-clipped=2.0
+2023-04-26 22:57:13,688 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=49542.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:57:56,799 INFO [finetune.py:976] (0/7) Epoch 9, batch 3750, loss[loss=0.1614, simple_loss=0.2071, pruned_loss=0.05781, over 4080.00 frames. ], tot_loss[loss=0.1942, simple_loss=0.2599, pruned_loss=0.06425, over 951080.08 frames. ], batch size: 17, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:58:23,821 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49592.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 22:58:34,889 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9015, 1.3059, 3.2863, 3.0319, 2.9331, 3.1908, 3.1773, 2.8948],
+ device='cuda:0'), covar=tensor([0.7014, 0.5060, 0.1324, 0.2018, 0.1354, 0.1938, 0.1640, 0.1687],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0309, 0.0408, 0.0412, 0.0352, 0.0406, 0.0317, 0.0371],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 22:58:44,873 INFO [finetune.py:976] (0/7) Epoch 9, batch 3800, loss[loss=0.1925, simple_loss=0.2573, pruned_loss=0.06387, over 4724.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2597, pruned_loss=0.06374, over 951451.69 frames. ], batch size: 23, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:58:52,833 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.711e+02 2.053e+02 2.525e+02 6.309e+02, threshold=4.105e+02, percent-clipped=4.0
+2023-04-26 22:59:17,906 INFO [finetune.py:976] (0/7) Epoch 9, batch 3850, loss[loss=0.2495, simple_loss=0.2977, pruned_loss=0.1006, over 4848.00 frames. ], tot_loss[loss=0.1944, simple_loss=0.2602, pruned_loss=0.06435, over 953519.85 frames. ], batch size: 47, lr: 3.78e-03, grad_scale: 16.0
+2023-04-26 22:59:25,882 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5399, 0.8726, 1.2713, 1.1356, 1.6039, 1.3111, 1.0712, 1.2296],
+ device='cuda:0'), covar=tensor([0.1946, 0.2206, 0.2495, 0.1978, 0.1271, 0.2018, 0.2672, 0.2779],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0324, 0.0353, 0.0297, 0.0335, 0.0322, 0.0308, 0.0356],
+ device='cuda:0'), out_proj_covar=tensor([6.5104e-05, 6.8880e-05, 7.6172e-05, 6.1416e-05, 7.0196e-05, 6.9075e-05,
+ 6.6163e-05, 7.6419e-05], device='cuda:0')
+2023-04-26 22:59:42,161 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1564, 1.8975, 2.1188, 2.3092, 2.4011, 2.0186, 1.6183, 2.1371],
+ device='cuda:0'), covar=tensor([0.0739, 0.1036, 0.0619, 0.0610, 0.0598, 0.0855, 0.0817, 0.0525],
+ device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0204, 0.0181, 0.0175, 0.0177, 0.0188, 0.0158, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 22:59:49,731 INFO [finetune.py:976] (0/7) Epoch 9, batch 3900, loss[loss=0.1977, simple_loss=0.2512, pruned_loss=0.07216, over 4764.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2574, pruned_loss=0.06372, over 953912.73 frames. ], batch size: 28, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 22:59:58,108 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.293e+02 1.654e+02 1.955e+02 2.414e+02 4.265e+02, threshold=3.910e+02, percent-clipped=1.0
+2023-04-26 23:00:04,890 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3563, 3.2167, 2.5744, 3.8990, 3.3923, 3.3906, 1.4592, 3.3365],
+ device='cuda:0'), covar=tensor([0.2060, 0.1508, 0.3469, 0.2296, 0.2816, 0.2172, 0.6398, 0.2825],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0218, 0.0252, 0.0306, 0.0303, 0.0253, 0.0273, 0.0273],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 23:00:16,200 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7294, 1.5999, 4.5947, 4.2928, 3.9815, 4.3361, 4.1981, 4.0300],
+ device='cuda:0'), covar=tensor([0.6658, 0.5620, 0.0934, 0.1739, 0.0994, 0.1223, 0.1112, 0.1409],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0305, 0.0402, 0.0408, 0.0347, 0.0400, 0.0312, 0.0366],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 23:00:18,050 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=49766.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:00:21,571 INFO [finetune.py:976] (0/7) Epoch 9, batch 3950, loss[loss=0.1506, simple_loss=0.222, pruned_loss=0.03962, over 4741.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2534, pruned_loss=0.06173, over 955144.83 frames. ], batch size: 27, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:00:27,207 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49778.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:00:33,309 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=49788.0, num_to_drop=1, layers_to_drop={1}
+2023-04-26 23:00:48,262 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9514, 1.3120, 1.1117, 1.6020, 1.4060, 1.4602, 1.2427, 2.4807],
+ device='cuda:0'), covar=tensor([0.0660, 0.0781, 0.0875, 0.1250, 0.0658, 0.0530, 0.0772, 0.0233],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0059],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-26 23:00:55,288 INFO [finetune.py:976] (0/7) Epoch 9, batch 4000, loss[loss=0.2632, simple_loss=0.3249, pruned_loss=0.1008, over 4820.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2543, pruned_loss=0.06285, over 955066.18 frames. ], batch size: 40, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:00:58,940 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=49827.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:01:05,132 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.602e+02 1.886e+02 2.314e+02 3.383e+02, threshold=3.771e+02, percent-clipped=0.0
+2023-04-26 23:01:10,119 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5696, 3.4436, 1.3931, 1.9514, 1.8833, 2.7366, 1.9861, 1.0399],
+ device='cuda:0'), covar=tensor([0.1265, 0.0948, 0.1566, 0.1203, 0.1049, 0.0837, 0.1450, 0.1941],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0249, 0.0140, 0.0122, 0.0135, 0.0153, 0.0117, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 23:01:11,410 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0562, 0.7497, 0.9553, 0.7901, 1.1914, 0.9285, 0.8490, 0.9438],
+ device='cuda:0'), covar=tensor([0.1884, 0.1677, 0.2210, 0.1898, 0.1120, 0.1509, 0.1900, 0.2403],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0323, 0.0354, 0.0297, 0.0334, 0.0321, 0.0308, 0.0355],
+ device='cuda:0'), out_proj_covar=tensor([6.4899e-05, 6.8508e-05, 7.6370e-05, 6.1401e-05, 6.9850e-05, 6.8983e-05,
+ 6.6165e-05, 7.6234e-05], device='cuda:0')
+2023-04-26 23:01:44,301 INFO [finetune.py:976] (0/7) Epoch 9, batch 4050, loss[loss=0.2443, simple_loss=0.2992, pruned_loss=0.09468, over 4824.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2596, pruned_loss=0.06537, over 953854.25 frames. ], batch size: 33, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:02:14,664 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=49892.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:02:37,193 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7261, 2.5349, 1.8481, 1.6836, 1.2870, 1.3241, 1.8549, 1.2928],
+ device='cuda:0'), covar=tensor([0.1806, 0.1438, 0.1614, 0.2010, 0.2589, 0.2122, 0.1187, 0.2172],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0218, 0.0172, 0.0206, 0.0206, 0.0185, 0.0162, 0.0189],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-26 23:02:49,741 INFO [finetune.py:976] (0/7) Epoch 9, batch 4100, loss[loss=0.2214, simple_loss=0.2939, pruned_loss=0.07439, over 4912.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2615, pruned_loss=0.06531, over 954824.61 frames. ], batch size: 36, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:03:10,346 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.345e+02 1.694e+02 2.037e+02 2.558e+02 4.844e+02, threshold=4.074e+02, percent-clipped=3.0
+2023-04-26 23:03:13,471 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=49940.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:03:34,099 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7236, 2.0447, 1.7620, 1.8894, 1.5451, 1.7162, 1.7818, 1.4584],
+ device='cuda:0'), covar=tensor([0.1617, 0.1067, 0.0690, 0.1118, 0.2686, 0.1078, 0.1679, 0.2050],
+ device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0316, 0.0227, 0.0287, 0.0311, 0.0270, 0.0256, 0.0279],
+ device='cuda:0'), out_proj_covar=tensor([1.1939e-04, 1.2748e-04, 9.1110e-05, 1.1509e-04, 1.2768e-04, 1.0884e-04,
+ 1.0490e-04, 1.1212e-04], device='cuda:0')
+2023-04-26 23:03:53,585 INFO [finetune.py:976] (0/7) Epoch 9, batch 4150, loss[loss=0.1529, simple_loss=0.2279, pruned_loss=0.03893, over 4778.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.2622, pruned_loss=0.06508, over 955476.75 frames. ], batch size: 29, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:03:54,930 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9532, 2.1774, 1.7645, 1.5814, 1.9889, 1.6966, 2.6935, 1.4101],
+ device='cuda:0'), covar=tensor([0.4092, 0.1703, 0.5044, 0.3467, 0.2029, 0.3008, 0.1612, 0.5405],
+ device='cuda:0'), in_proj_covar=tensor([0.0347, 0.0349, 0.0432, 0.0360, 0.0390, 0.0383, 0.0380, 0.0420],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 23:04:18,884 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-50000.pt
+2023-04-26 23:04:33,306 INFO [finetune.py:976] (0/7) Epoch 9, batch 4200, loss[loss=0.187, simple_loss=0.2488, pruned_loss=0.06266, over 4931.00 frames. ], tot_loss[loss=0.1969, simple_loss=0.2633, pruned_loss=0.06525, over 956597.58 frames. ], batch size: 33, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:04:41,638 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.800e+02 2.150e+02 2.461e+02 7.133e+02, threshold=4.301e+02, percent-clipped=1.0
+2023-04-26 23:04:59,718 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3267, 1.4199, 1.6205, 1.7398, 1.6001, 1.8452, 1.8504, 1.7001],
+ device='cuda:0'), covar=tensor([0.4401, 0.7138, 0.6424, 0.5851, 0.7044, 0.9511, 0.6173, 0.6418],
+ device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0382, 0.0314, 0.0325, 0.0340, 0.0401, 0.0361, 0.0323],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 23:05:05,671 INFO [finetune.py:976] (0/7) Epoch 9, batch 4250, loss[loss=0.2687, simple_loss=0.3087, pruned_loss=0.1143, over 4895.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2622, pruned_loss=0.06562, over 956804.83 frames. ], batch size: 32, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:05:07,012 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3130, 1.4036, 3.9175, 3.6666, 3.5234, 3.7797, 3.7311, 3.4349],
+ device='cuda:0'), covar=tensor([0.6793, 0.5552, 0.1157, 0.1815, 0.1036, 0.1660, 0.1286, 0.1561],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0309, 0.0406, 0.0411, 0.0351, 0.0405, 0.0316, 0.0370],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-26 23:05:09,929 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50078.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:05:16,035 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50088.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 23:05:21,935 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5256, 1.3094, 1.6793, 1.6132, 1.3911, 1.1892, 1.2224, 0.7325],
+ device='cuda:0'), covar=tensor([0.0543, 0.0940, 0.0708, 0.0683, 0.0691, 0.1528, 0.0730, 0.1042],
+ device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0073, 0.0071, 0.0066, 0.0075, 0.0095, 0.0077, 0.0074],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-26 23:05:37,583 INFO [finetune.py:976] (0/7) Epoch 9, batch 4300, loss[loss=0.1686, simple_loss=0.2362, pruned_loss=0.05044, over 4913.00 frames. ], tot_loss[loss=0.1938, simple_loss=0.2587, pruned_loss=0.06444, over 958389.18 frames. ], batch size: 43, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:05:37,653 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50122.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:05:40,095 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=50126.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:05:41,357 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50128.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:05:46,443 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.729e+02 1.986e+02 2.496e+02 5.058e+02, threshold=3.971e+02, percent-clipped=3.0
+2023-04-26 23:05:47,120 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=50136.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 23:06:09,267 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-04-26 23:06:10,817 INFO [finetune.py:976] (0/7) Epoch 9, batch 4350, loss[loss=0.1914, simple_loss=0.2588, pruned_loss=0.06203, over 4924.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2558, pruned_loss=0.06371, over 958277.77 frames. ], batch size: 37, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:06:22,167 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50189.0, num_to_drop=0, layers_to_drop=set()
+2023-04-26 23:06:27,933 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50197.0, num_to_drop=1, layers_to_drop={0}
+2023-04-26 23:06:37,465 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.91 vs. limit=5.0
+2023-04-26 23:06:42,225 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6038, 1.5273, 0.8256, 1.2626, 1.9600, 1.4679, 1.3638, 1.3855],
+ device='cuda:0'), covar=tensor([0.0517, 0.0401, 0.0369, 0.0577, 0.0271, 0.0547, 0.0499, 0.0607],
+ device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0023, 0.0030, 0.0021, 0.0029, 0.0029, 0.0030],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0037, 0.0048, 0.0048, 0.0049],
+ device='cuda:0')
+2023-04-26 23:06:44,561 INFO [finetune.py:976] (0/7) Epoch 9, batch 4400, loss[loss=0.2066, simple_loss=0.287, pruned_loss=0.06313, over 4907.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2577, pruned_loss=0.06446, over 957008.68 frames. ], batch size: 37, lr: 3.78e-03, grad_scale: 32.0
+2023-04-26 23:06:49,677 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-04-26 23:06:52,519 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.620e+02 1.881e+02 2.281e+02 3.883e+02, threshold=3.762e+02, percent-clipped=0.0
+2023-04-26 23:06:57,863 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1224, 1.2721, 1.4928, 1.6580, 1.5790, 1.7274, 1.6329, 1.5894],
+ device='cuda:0'), covar=tensor([0.4599, 0.6523, 0.5355, 0.5055, 0.6225, 0.8665, 0.6092, 0.5736],
+ device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0380, 0.0313, 0.0324, 0.0338, 0.0400, 0.0359, 0.0322],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-26 23:07:14,001 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50258.0, num_to_drop=1, layers_to_drop={3}
+2023-04-26 23:07:27,674 INFO [finetune.py:976] (0/7) Epoch 9, batch 4450, loss[loss=0.1605, simple_loss=0.2453, pruned_loss=0.03787, over 4871.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2611, pruned_loss=0.06534, over 952933.38 frames. ], batch size: 34, lr: 3.77e-03, grad_scale: 32.0
+2023-04-26 23:08:33,131 INFO [finetune.py:976] (0/7) Epoch 9, batch 4500, loss[loss=0.1936, simple_loss=0.255, pruned_loss=0.06614, over 4705.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2631, pruned_loss=0.06575, over 955110.45 frames.
], batch size: 23, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:08:43,900 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8367, 1.6898, 2.0789, 2.1855, 1.6689, 1.4108, 1.7556, 1.0876], + device='cuda:0'), covar=tensor([0.0797, 0.0829, 0.0840, 0.1027, 0.1004, 0.1451, 0.0884, 0.1111], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0073, 0.0072, 0.0067, 0.0076, 0.0096, 0.0078, 0.0074], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 23:08:46,790 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.697e+02 2.110e+02 2.509e+02 5.255e+02, threshold=4.219e+02, percent-clipped=3.0 +2023-04-26 23:08:57,707 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9942, 1.9887, 1.7219, 1.5478, 2.0924, 1.6205, 2.5614, 1.5761], + device='cuda:0'), covar=tensor([0.4237, 0.1913, 0.5295, 0.3394, 0.1932, 0.2836, 0.1472, 0.4804], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0351, 0.0435, 0.0363, 0.0391, 0.0384, 0.0381, 0.0423], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:09:12,329 INFO [finetune.py:976] (0/7) Epoch 9, batch 4550, loss[loss=0.2125, simple_loss=0.2814, pruned_loss=0.07185, over 4858.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2643, pruned_loss=0.06634, over 953853.10 frames. ], batch size: 44, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:09:56,474 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1199, 2.5143, 1.0477, 1.4320, 2.0275, 1.3045, 3.4098, 1.6924], + device='cuda:0'), covar=tensor([0.0630, 0.0599, 0.0730, 0.1268, 0.0481, 0.0974, 0.0301, 0.0652], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0051, 0.0052, 0.0078, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-26 23:10:20,888 INFO [finetune.py:976] (0/7) Epoch 9, batch 4600, loss[loss=0.1785, simple_loss=0.2434, pruned_loss=0.05687, over 4764.00 frames. ], tot_loss[loss=0.1966, simple_loss=0.2631, pruned_loss=0.06505, over 954703.23 frames. ], batch size: 28, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:10:20,978 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50422.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:10:39,535 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.116e+02 1.674e+02 1.954e+02 2.272e+02 3.604e+02, threshold=3.908e+02, percent-clipped=0.0 +2023-04-26 23:10:39,844 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-26 23:11:09,106 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3690, 1.7892, 2.1994, 2.7977, 2.2164, 1.8068, 1.6779, 1.9960], + device='cuda:0'), covar=tensor([0.3866, 0.3890, 0.1842, 0.3094, 0.3463, 0.3020, 0.4547, 0.2867], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0251, 0.0220, 0.0318, 0.0214, 0.0229, 0.0233, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 23:11:14,981 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=50470.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:11:16,115 INFO [finetune.py:976] (0/7) Epoch 9, batch 4650, loss[loss=0.1803, simple_loss=0.2513, pruned_loss=0.05463, over 4723.00 frames. 
], tot_loss[loss=0.1939, simple_loss=0.26, pruned_loss=0.06392, over 954020.41 frames. ], batch size: 23, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:11:23,420 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50484.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:11:46,337 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5067, 1.8258, 1.6704, 2.1534, 2.0720, 2.0145, 1.7038, 4.3576], + device='cuda:0'), covar=tensor([0.0541, 0.0714, 0.0723, 0.1133, 0.0587, 0.0545, 0.0686, 0.0099], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0058], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-26 23:11:49,304 INFO [finetune.py:976] (0/7) Epoch 9, batch 4700, loss[loss=0.167, simple_loss=0.2446, pruned_loss=0.0447, over 4797.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2574, pruned_loss=0.0633, over 954162.74 frames. ], batch size: 29, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:11:57,183 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.640e+02 1.914e+02 2.343e+02 4.102e+02, threshold=3.828e+02, percent-clipped=1.0 +2023-04-26 23:12:04,111 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50546.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:12:08,321 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50553.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 23:12:22,116 INFO [finetune.py:976] (0/7) Epoch 9, batch 4750, loss[loss=0.1814, simple_loss=0.2489, pruned_loss=0.05698, over 4911.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2544, pruned_loss=0.06208, over 953241.02 frames. ], batch size: 36, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:12:54,331 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50607.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:13:16,244 INFO [finetune.py:976] (0/7) Epoch 9, batch 4800, loss[loss=0.2426, simple_loss=0.3085, pruned_loss=0.08834, over 4832.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2581, pruned_loss=0.06354, over 954212.43 frames. ], batch size: 33, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:13:30,153 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.707e+02 2.037e+02 2.400e+02 5.618e+02, threshold=4.074e+02, percent-clipped=3.0 +2023-04-26 23:13:44,330 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-26 23:13:55,580 INFO [finetune.py:976] (0/7) Epoch 9, batch 4850, loss[loss=0.2634, simple_loss=0.323, pruned_loss=0.1019, over 4817.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2614, pruned_loss=0.06456, over 954417.18 frames. ], batch size: 33, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:14:16,525 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-26 23:14:28,052 INFO [finetune.py:976] (0/7) Epoch 9, batch 4900, loss[loss=0.2217, simple_loss=0.2873, pruned_loss=0.07801, over 4204.00 frames. ], tot_loss[loss=0.1967, simple_loss=0.2626, pruned_loss=0.06537, over 952666.28 frames. 
], batch size: 65, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:14:36,903 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.087e+02 1.673e+02 1.942e+02 2.428e+02 3.700e+02, threshold=3.884e+02, percent-clipped=0.0 +2023-04-26 23:15:14,088 INFO [finetune.py:976] (0/7) Epoch 9, batch 4950, loss[loss=0.2068, simple_loss=0.2711, pruned_loss=0.07123, over 4760.00 frames. ], tot_loss[loss=0.1985, simple_loss=0.2647, pruned_loss=0.06614, over 954566.96 frames. ], batch size: 54, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:15:33,259 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50782.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:15:34,473 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50784.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:15:42,094 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6327, 1.1309, 1.7153, 2.0887, 1.7276, 1.6098, 1.6559, 1.6682], + device='cuda:0'), covar=tensor([0.6524, 0.8951, 0.9007, 0.8777, 0.8224, 1.1312, 1.1242, 0.9940], + device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0420, 0.0504, 0.0522, 0.0439, 0.0458, 0.0468, 0.0466], + device='cuda:0'), out_proj_covar=tensor([9.9746e-05, 1.0420e-04, 1.1382e-04, 1.2409e-04, 1.0656e-04, 1.1086e-04, + 1.1256e-04, 1.1272e-04], device='cuda:0') +2023-04-26 23:15:51,992 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0097, 1.3690, 1.1962, 1.5972, 1.4528, 1.4079, 1.2752, 2.4022], + device='cuda:0'), covar=tensor([0.0635, 0.0728, 0.0790, 0.1194, 0.0618, 0.0567, 0.0749, 0.0245], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0058], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0016], + device='cuda:0') +2023-04-26 23:16:02,469 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50808.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 23:16:11,333 INFO [finetune.py:976] (0/7) Epoch 9, batch 5000, loss[loss=0.2321, simple_loss=0.2866, pruned_loss=0.08883, over 4841.00 frames. ], tot_loss[loss=0.1973, simple_loss=0.2633, pruned_loss=0.0657, over 954565.30 frames. ], batch size: 49, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:16:19,872 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=50832.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:16:21,616 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.711e+02 2.099e+02 2.479e+02 5.783e+02, threshold=4.198e+02, percent-clipped=3.0 +2023-04-26 23:16:26,665 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50843.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:16:32,710 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=50853.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 23:16:42,399 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=50869.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 23:16:44,512 INFO [finetune.py:976] (0/7) Epoch 9, batch 5050, loss[loss=0.177, simple_loss=0.2467, pruned_loss=0.05368, over 4877.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2598, pruned_loss=0.06467, over 956336.93 frames. 
], batch size: 34, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:17:04,640 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=50901.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 23:17:05,251 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=50902.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:17:17,313 INFO [finetune.py:976] (0/7) Epoch 9, batch 5100, loss[loss=0.1427, simple_loss=0.209, pruned_loss=0.03817, over 4773.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2559, pruned_loss=0.06302, over 955105.56 frames. ], batch size: 26, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:17:26,166 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.643e+02 1.891e+02 2.439e+02 4.473e+02, threshold=3.781e+02, percent-clipped=2.0 +2023-04-26 23:17:34,489 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=50946.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:17:50,266 INFO [finetune.py:976] (0/7) Epoch 9, batch 5150, loss[loss=0.1787, simple_loss=0.2316, pruned_loss=0.06292, over 4213.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2551, pruned_loss=0.06281, over 955086.43 frames. ], batch size: 18, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:17:52,188 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1153, 1.5279, 1.2507, 1.6775, 1.5490, 1.7544, 1.2895, 3.2651], + device='cuda:0'), covar=tensor([0.0624, 0.0740, 0.0800, 0.1164, 0.0631, 0.0563, 0.0746, 0.0163], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0058], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0016], + device='cuda:0') +2023-04-26 23:18:16,631 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0952, 1.4871, 1.9371, 2.3215, 1.9273, 1.4710, 1.1519, 1.6492], + device='cuda:0'), covar=tensor([0.3915, 0.4176, 0.1913, 0.2764, 0.3143, 0.3231, 0.4827, 0.2750], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0248, 0.0218, 0.0315, 0.0212, 0.0227, 0.0231, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 23:18:25,486 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51007.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:18:40,596 INFO [finetune.py:976] (0/7) Epoch 9, batch 5200, loss[loss=0.2057, simple_loss=0.2856, pruned_loss=0.06286, over 4815.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.259, pruned_loss=0.06406, over 956274.08 frames. 
], batch size: 51, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:18:49,051 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.700e+02 2.036e+02 2.415e+02 4.035e+02, threshold=4.072e+02, percent-clipped=2.0 +2023-04-26 23:18:50,798 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5239, 1.4528, 1.8796, 1.7722, 1.4416, 1.1959, 1.5083, 1.0416], + device='cuda:0'), covar=tensor([0.0766, 0.0903, 0.0466, 0.0831, 0.0823, 0.1448, 0.0820, 0.0984], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0073, 0.0071, 0.0067, 0.0076, 0.0096, 0.0078, 0.0074], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 23:19:08,578 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51062.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:19:14,487 INFO [finetune.py:976] (0/7) Epoch 9, batch 5250, loss[loss=0.1723, simple_loss=0.2553, pruned_loss=0.04467, over 4805.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2605, pruned_loss=0.0641, over 956373.28 frames. ], batch size: 45, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:19:33,978 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-26 23:19:47,750 INFO [finetune.py:976] (0/7) Epoch 9, batch 5300, loss[loss=0.1877, simple_loss=0.2558, pruned_loss=0.05985, over 4756.00 frames. ], tot_loss[loss=0.1959, simple_loss=0.2622, pruned_loss=0.0648, over 954804.22 frames. ], batch size: 28, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:19:48,485 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51123.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:19:56,071 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.811e+02 2.045e+02 2.583e+02 4.950e+02, threshold=4.090e+02, percent-clipped=1.0 +2023-04-26 23:19:57,977 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51138.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:20:00,454 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5007, 3.2529, 2.6193, 2.8518, 2.3416, 2.6694, 2.7622, 2.0484], + device='cuda:0'), covar=tensor([0.2340, 0.1363, 0.0778, 0.1356, 0.3215, 0.1341, 0.2082, 0.2679], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0316, 0.0226, 0.0288, 0.0316, 0.0272, 0.0257, 0.0278], + device='cuda:0'), out_proj_covar=tensor([1.1970e-04, 1.2744e-04, 9.0832e-05, 1.1566e-04, 1.2936e-04, 1.0961e-04, + 1.0509e-04, 1.1170e-04], device='cuda:0') +2023-04-26 23:20:15,868 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51164.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 23:20:20,663 INFO [finetune.py:976] (0/7) Epoch 9, batch 5350, loss[loss=0.2022, simple_loss=0.2648, pruned_loss=0.06986, over 4908.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2625, pruned_loss=0.06485, over 954128.14 frames. ], batch size: 37, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:20:20,769 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51172.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 23:20:23,248 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-26 23:20:25,750 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. 
limit=2.0 +2023-04-26 23:20:46,713 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51202.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:21:10,254 INFO [finetune.py:976] (0/7) Epoch 9, batch 5400, loss[loss=0.1691, simple_loss=0.2305, pruned_loss=0.05387, over 4337.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2603, pruned_loss=0.06451, over 954769.01 frames. ], batch size: 65, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:21:21,591 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51233.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 23:21:22,681 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.244e+02 1.662e+02 1.922e+02 2.270e+02 4.708e+02, threshold=3.844e+02, percent-clipped=3.0 +2023-04-26 23:21:43,668 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=51250.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:21:51,900 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8628, 1.9820, 1.9329, 1.4133, 2.0317, 1.6946, 2.6177, 1.6371], + device='cuda:0'), covar=tensor([0.4097, 0.1686, 0.4861, 0.3382, 0.1881, 0.2612, 0.1679, 0.4435], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0347, 0.0429, 0.0361, 0.0386, 0.0382, 0.0379, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:21:55,979 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-04-26 23:22:14,587 INFO [finetune.py:976] (0/7) Epoch 9, batch 5450, loss[loss=0.1555, simple_loss=0.2237, pruned_loss=0.04368, over 4909.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2561, pruned_loss=0.06251, over 955998.01 frames. ], batch size: 43, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:22:47,573 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51302.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:23:09,405 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6545, 1.1895, 1.7237, 2.1033, 1.7622, 1.6566, 1.6904, 1.7079], + device='cuda:0'), covar=tensor([0.6061, 0.8857, 0.8530, 0.8620, 0.8128, 1.0277, 1.0276, 0.9452], + device='cuda:0'), in_proj_covar=tensor([0.0407, 0.0416, 0.0501, 0.0517, 0.0435, 0.0454, 0.0466, 0.0463], + device='cuda:0'), out_proj_covar=tensor([9.9149e-05, 1.0310e-04, 1.1292e-04, 1.2302e-04, 1.0560e-04, 1.0998e-04, + 1.1201e-04, 1.1198e-04], device='cuda:0') +2023-04-26 23:23:19,200 INFO [finetune.py:976] (0/7) Epoch 9, batch 5500, loss[loss=0.1831, simple_loss=0.2543, pruned_loss=0.05602, over 4839.00 frames. ], tot_loss[loss=0.1879, simple_loss=0.2532, pruned_loss=0.06133, over 955866.62 frames. 
], batch size: 47, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:23:32,910 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.202e+01 1.588e+02 1.902e+02 2.243e+02 3.887e+02, threshold=3.804e+02, percent-clipped=1.0 +2023-04-26 23:24:14,278 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6752, 1.3492, 1.8572, 2.1733, 1.8158, 1.6715, 1.7464, 1.7578], + device='cuda:0'), covar=tensor([0.5980, 0.7879, 0.8602, 0.7855, 0.7468, 0.9856, 0.9982, 0.9019], + device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0418, 0.0504, 0.0521, 0.0437, 0.0456, 0.0468, 0.0465], + device='cuda:0'), out_proj_covar=tensor([9.9642e-05, 1.0354e-04, 1.1359e-04, 1.2383e-04, 1.0618e-04, 1.1048e-04, + 1.1257e-04, 1.1243e-04], device='cuda:0') +2023-04-26 23:24:14,746 INFO [finetune.py:976] (0/7) Epoch 9, batch 5550, loss[loss=0.1915, simple_loss=0.2715, pruned_loss=0.05577, over 4792.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2578, pruned_loss=0.0641, over 955502.12 frames. ], batch size: 29, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:24:20,973 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51382.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:24:26,992 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51392.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:24:28,355 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-26 23:24:32,670 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0 +2023-04-26 23:24:42,926 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51418.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:24:45,252 INFO [finetune.py:976] (0/7) Epoch 9, batch 5600, loss[loss=0.195, simple_loss=0.2625, pruned_loss=0.06377, over 4779.00 frames. ], tot_loss[loss=0.1953, simple_loss=0.2611, pruned_loss=0.06475, over 953615.02 frames. ], batch size: 29, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:24:52,678 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.274e+02 1.747e+02 2.120e+02 2.551e+02 6.497e+02, threshold=4.239e+02, percent-clipped=3.0 +2023-04-26 23:24:54,539 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51438.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:24:57,453 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51443.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:25:03,251 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51453.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:25:06,019 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6909, 1.6243, 2.1322, 2.0700, 1.5949, 1.3708, 1.7019, 1.0872], + device='cuda:0'), covar=tensor([0.0840, 0.1123, 0.0553, 0.1036, 0.0948, 0.1357, 0.1006, 0.1087], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0073, 0.0072, 0.0068, 0.0077, 0.0097, 0.0079, 0.0075], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 23:25:10,021 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51464.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 23:25:15,157 INFO [finetune.py:976] (0/7) Epoch 9, batch 5650, loss[loss=0.2127, simple_loss=0.2913, pruned_loss=0.06709, over 4811.00 frames. 
], tot_loss[loss=0.1986, simple_loss=0.265, pruned_loss=0.06608, over 952660.79 frames. ], batch size: 51, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:25:23,727 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=51486.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:25:29,050 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51495.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:25:32,702 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-26 23:25:39,094 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=51512.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 23:25:42,185 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0942, 2.1263, 2.1654, 1.7478, 2.3362, 1.9835, 2.9030, 1.8078], + device='cuda:0'), covar=tensor([0.3673, 0.1666, 0.3761, 0.2745, 0.1418, 0.2157, 0.1562, 0.4131], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0345, 0.0426, 0.0360, 0.0384, 0.0381, 0.0376, 0.0418], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:25:45,276 INFO [finetune.py:976] (0/7) Epoch 9, batch 5700, loss[loss=0.174, simple_loss=0.2304, pruned_loss=0.05882, over 4286.00 frames. ], tot_loss[loss=0.1954, simple_loss=0.26, pruned_loss=0.06543, over 933980.37 frames. ], batch size: 18, lr: 3.77e-03, grad_scale: 32.0 +2023-04-26 23:25:48,916 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51528.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 23:25:53,110 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.028e+02 1.631e+02 1.982e+02 2.330e+02 4.156e+02, threshold=3.963e+02, percent-clipped=0.0 +2023-04-26 23:26:01,791 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-9.pt +2023-04-26 23:26:16,105 INFO [finetune.py:976] (0/7) Epoch 10, batch 0, loss[loss=0.2178, simple_loss=0.2803, pruned_loss=0.0777, over 4894.00 frames. ], tot_loss[loss=0.2178, simple_loss=0.2803, pruned_loss=0.0777, over 4894.00 frames. ], batch size: 37, lr: 3.76e-03, grad_scale: 32.0 +2023-04-26 23:26:16,107 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-26 23:26:31,821 INFO [finetune.py:1010] (0/7) Epoch 10, validation: loss=0.1558, simple_loss=0.2282, pruned_loss=0.04164, over 2265189.00 frames. +2023-04-26 23:26:31,821 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-26 23:26:37,654 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51556.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:27:05,702 INFO [finetune.py:976] (0/7) Epoch 10, batch 50, loss[loss=0.1784, simple_loss=0.2425, pruned_loss=0.05711, over 4852.00 frames. ], tot_loss[loss=0.1982, simple_loss=0.2635, pruned_loss=0.06644, over 216255.22 frames. 
], batch size: 44, lr: 3.76e-03, grad_scale: 32.0 +2023-04-26 23:27:08,560 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51602.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:27:24,337 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2635, 1.7434, 2.0549, 2.6397, 2.1082, 1.6732, 1.6804, 1.9367], + device='cuda:0'), covar=tensor([0.2835, 0.3394, 0.1713, 0.2516, 0.3033, 0.2764, 0.4533, 0.2732], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0251, 0.0220, 0.0318, 0.0214, 0.0227, 0.0233, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 23:27:31,938 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.709e+02 2.017e+02 2.493e+02 1.011e+03, threshold=4.035e+02, percent-clipped=6.0 +2023-04-26 23:27:43,484 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7151, 1.4318, 1.9730, 2.1896, 1.8805, 1.7669, 1.8888, 1.8191], + device='cuda:0'), covar=tensor([0.5967, 0.7974, 0.7926, 0.8531, 0.7786, 0.9474, 0.9138, 0.9115], + device='cuda:0'), in_proj_covar=tensor([0.0407, 0.0415, 0.0501, 0.0518, 0.0436, 0.0454, 0.0466, 0.0463], + device='cuda:0'), out_proj_covar=tensor([9.9039e-05, 1.0286e-04, 1.1289e-04, 1.2325e-04, 1.0586e-04, 1.0995e-04, + 1.1191e-04, 1.1190e-04], device='cuda:0') +2023-04-26 23:27:45,647 INFO [finetune.py:976] (0/7) Epoch 10, batch 100, loss[loss=0.1597, simple_loss=0.2285, pruned_loss=0.04547, over 4890.00 frames. ], tot_loss[loss=0.1926, simple_loss=0.2568, pruned_loss=0.06419, over 378521.74 frames. ], batch size: 32, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:27:46,797 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=51650.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:28:38,565 INFO [finetune.py:976] (0/7) Epoch 10, batch 150, loss[loss=0.2096, simple_loss=0.2674, pruned_loss=0.07589, over 4895.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2522, pruned_loss=0.06255, over 506763.44 frames. ], batch size: 43, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:29:04,456 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51718.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:29:20,965 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.275e+02 1.676e+02 2.023e+02 2.458e+02 4.768e+02, threshold=4.046e+02, percent-clipped=1.0 +2023-04-26 23:29:22,211 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51738.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:29:29,237 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=51747.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:29:29,795 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51748.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:29:30,342 INFO [finetune.py:976] (0/7) Epoch 10, batch 200, loss[loss=0.1905, simple_loss=0.252, pruned_loss=0.06455, over 4921.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2527, pruned_loss=0.06321, over 607462.04 frames. 
], batch size: 38, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:29:36,804 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3206, 2.5703, 1.9086, 2.0688, 2.3354, 1.8935, 3.3169, 1.5246], + device='cuda:0'), covar=tensor([0.4191, 0.1848, 0.4497, 0.3352, 0.2361, 0.3043, 0.1606, 0.5065], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0348, 0.0428, 0.0361, 0.0387, 0.0382, 0.0379, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:29:42,677 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=51766.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:29:42,880 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-26 23:29:53,087 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6302, 2.3174, 1.4919, 1.7733, 1.1905, 1.2379, 1.5957, 1.1338], + device='cuda:0'), covar=tensor([0.2244, 0.1687, 0.2065, 0.2078, 0.3094, 0.2604, 0.1300, 0.2445], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0216, 0.0171, 0.0205, 0.0205, 0.0184, 0.0160, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 23:30:04,062 INFO [finetune.py:976] (0/7) Epoch 10, batch 250, loss[loss=0.2349, simple_loss=0.2994, pruned_loss=0.08518, over 4834.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2551, pruned_loss=0.06256, over 683735.62 frames. ], batch size: 45, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:30:11,185 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=51808.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:30:23,879 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=51828.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 23:30:28,669 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.252e+02 1.711e+02 1.990e+02 2.532e+02 5.377e+02, threshold=3.981e+02, percent-clipped=2.0 +2023-04-26 23:30:37,590 INFO [finetune.py:976] (0/7) Epoch 10, batch 300, loss[loss=0.2261, simple_loss=0.2906, pruned_loss=0.08079, over 4840.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2578, pruned_loss=0.06317, over 745098.51 frames. ], batch size: 47, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:30:37,743 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3673, 1.5828, 1.6668, 1.8213, 1.7196, 1.8024, 1.7836, 1.7533], + device='cuda:0'), covar=tensor([0.4505, 0.7335, 0.5917, 0.5605, 0.6734, 0.9418, 0.7015, 0.6635], + device='cuda:0'), in_proj_covar=tensor([0.0326, 0.0385, 0.0317, 0.0327, 0.0341, 0.0403, 0.0361, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 23:30:38,887 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=51851.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:30:56,474 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=51876.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 23:31:10,844 INFO [finetune.py:976] (0/7) Epoch 10, batch 350, loss[loss=0.2224, simple_loss=0.2746, pruned_loss=0.08509, over 4768.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2592, pruned_loss=0.06319, over 790922.42 frames. 
], batch size: 26, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:31:14,592 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0516, 1.4905, 1.9344, 2.2039, 1.8612, 1.5015, 1.0961, 1.6300], + device='cuda:0'), covar=tensor([0.4054, 0.4195, 0.1894, 0.2958, 0.3344, 0.3261, 0.5018, 0.2912], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0251, 0.0221, 0.0319, 0.0214, 0.0228, 0.0234, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 23:31:19,264 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4949, 1.3038, 1.7607, 1.7328, 1.3526, 1.1679, 1.3561, 0.8795], + device='cuda:0'), covar=tensor([0.0634, 0.0697, 0.0449, 0.0558, 0.0768, 0.1192, 0.0640, 0.0805], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0073, 0.0071, 0.0067, 0.0076, 0.0096, 0.0078, 0.0074], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 23:31:41,424 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.954e+01 1.688e+02 1.992e+02 2.453e+02 5.822e+02, threshold=3.984e+02, percent-clipped=3.0 +2023-04-26 23:31:59,606 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5779, 0.6930, 1.3421, 1.9655, 1.6745, 1.4808, 1.4489, 1.5310], + device='cuda:0'), covar=tensor([0.5573, 0.7127, 0.7666, 0.7966, 0.6814, 0.8732, 0.8333, 0.8178], + device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0418, 0.0504, 0.0523, 0.0439, 0.0458, 0.0469, 0.0468], + device='cuda:0'), out_proj_covar=tensor([9.9994e-05, 1.0369e-04, 1.1373e-04, 1.2430e-04, 1.0655e-04, 1.1081e-04, + 1.1272e-04, 1.1307e-04], device='cuda:0') +2023-04-26 23:32:00,720 INFO [finetune.py:976] (0/7) Epoch 10, batch 400, loss[loss=0.2579, simple_loss=0.3128, pruned_loss=0.1016, over 4822.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2604, pruned_loss=0.06336, over 826918.05 frames. ], batch size: 39, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:32:27,818 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5499, 1.7410, 1.3237, 1.0577, 1.1718, 1.1466, 1.3290, 1.0944], + device='cuda:0'), covar=tensor([0.1772, 0.1356, 0.1703, 0.1972, 0.2525, 0.2211, 0.1210, 0.2222], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0216, 0.0171, 0.0205, 0.0205, 0.0184, 0.0160, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 23:32:51,604 INFO [finetune.py:976] (0/7) Epoch 10, batch 450, loss[loss=0.196, simple_loss=0.2704, pruned_loss=0.06081, over 4895.00 frames. ], tot_loss[loss=0.1932, simple_loss=0.26, pruned_loss=0.06321, over 856520.23 frames. 
], batch size: 35, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:32:52,348 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-52000.pt +2023-04-26 23:33:45,356 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.582e+02 1.952e+02 2.304e+02 4.077e+02, threshold=3.905e+02, percent-clipped=1.0 +2023-04-26 23:33:46,670 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52038.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:33:53,321 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52040.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:33:58,107 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52048.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:34:04,123 INFO [finetune.py:976] (0/7) Epoch 10, batch 500, loss[loss=0.1618, simple_loss=0.2293, pruned_loss=0.04717, over 4856.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2586, pruned_loss=0.06314, over 879519.15 frames. ], batch size: 31, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:34:51,737 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=52086.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:35:03,591 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=52096.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:35:09,990 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7437, 1.7306, 4.4019, 4.1090, 3.8496, 4.1699, 4.1042, 3.8963], + device='cuda:0'), covar=tensor([0.6637, 0.5496, 0.0972, 0.1760, 0.1052, 0.1544, 0.1036, 0.1463], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0307, 0.0406, 0.0410, 0.0349, 0.0404, 0.0314, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:35:11,182 INFO [finetune.py:976] (0/7) Epoch 10, batch 550, loss[loss=0.1671, simple_loss=0.2342, pruned_loss=0.05002, over 4715.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2558, pruned_loss=0.06264, over 896836.46 frames. 
], batch size: 23, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:35:12,518 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52101.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:35:13,633 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52103.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:35:22,454 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4033, 1.7398, 1.2241, 0.9749, 1.0885, 1.0497, 1.2151, 1.0110], + device='cuda:0'), covar=tensor([0.1942, 0.1350, 0.2049, 0.2137, 0.2802, 0.2497, 0.1233, 0.2360], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0216, 0.0170, 0.0205, 0.0205, 0.0184, 0.0161, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 23:35:32,444 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7449, 3.5109, 2.8048, 4.2834, 3.7037, 3.6287, 1.6902, 3.6879], + device='cuda:0'), covar=tensor([0.1707, 0.1608, 0.3557, 0.1685, 0.3654, 0.2182, 0.5894, 0.2539], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0216, 0.0250, 0.0304, 0.0301, 0.0251, 0.0270, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 23:35:34,841 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7586, 2.9636, 2.6046, 2.6556, 3.0271, 2.8151, 3.9658, 2.4992], + device='cuda:0'), covar=tensor([0.3608, 0.1908, 0.3040, 0.2909, 0.1709, 0.2309, 0.1043, 0.3264], + device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0350, 0.0428, 0.0363, 0.0390, 0.0385, 0.0381, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:35:58,002 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6671, 2.1886, 1.5350, 1.3551, 1.2653, 1.2494, 1.4676, 1.1356], + device='cuda:0'), covar=tensor([0.1664, 0.1334, 0.1584, 0.1868, 0.2539, 0.1995, 0.1193, 0.2119], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0216, 0.0170, 0.0205, 0.0205, 0.0184, 0.0161, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 23:36:05,085 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.854e+02 2.167e+02 2.503e+02 5.657e+02, threshold=4.334e+02, percent-clipped=4.0 +2023-04-26 23:36:18,684 INFO [finetune.py:976] (0/7) Epoch 10, batch 600, loss[loss=0.1888, simple_loss=0.2513, pruned_loss=0.06312, over 4913.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2572, pruned_loss=0.06386, over 909380.95 frames. ], batch size: 36, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:36:19,980 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52151.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:36:49,107 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-26 23:37:16,289 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52193.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:37:25,228 INFO [finetune.py:976] (0/7) Epoch 10, batch 650, loss[loss=0.2349, simple_loss=0.2849, pruned_loss=0.09248, over 4837.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.261, pruned_loss=0.0651, over 921105.49 frames. 
], batch size: 30, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:37:25,288 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=52199.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:38:17,626 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.623e+02 1.914e+02 2.334e+02 8.066e+02, threshold=3.828e+02, percent-clipped=2.0 +2023-04-26 23:38:18,945 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1137, 1.5069, 1.3133, 1.6682, 1.5717, 1.7721, 1.3765, 3.2525], + device='cuda:0'), covar=tensor([0.0655, 0.0766, 0.0808, 0.1220, 0.0660, 0.0541, 0.0778, 0.0153], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0059], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-26 23:38:30,843 INFO [finetune.py:976] (0/7) Epoch 10, batch 700, loss[loss=0.235, simple_loss=0.2953, pruned_loss=0.08733, over 4805.00 frames. ], tot_loss[loss=0.1957, simple_loss=0.2619, pruned_loss=0.06471, over 931228.04 frames. ], batch size: 33, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:38:38,126 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-26 23:38:39,786 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52254.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:39:44,471 INFO [finetune.py:976] (0/7) Epoch 10, batch 750, loss[loss=0.2146, simple_loss=0.2722, pruned_loss=0.07849, over 4880.00 frames. ], tot_loss[loss=0.196, simple_loss=0.2625, pruned_loss=0.06479, over 937475.16 frames. ], batch size: 32, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:39:55,567 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0457, 2.6284, 2.0940, 1.9360, 1.4363, 1.3937, 2.1390, 1.4301], + device='cuda:0'), covar=tensor([0.1817, 0.1678, 0.1520, 0.1981, 0.2608, 0.2109, 0.1088, 0.2191], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0218, 0.0171, 0.0206, 0.0207, 0.0185, 0.0162, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 23:39:56,769 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52312.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:40:13,561 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.225e+02 1.731e+02 1.996e+02 2.532e+02 4.524e+02, threshold=3.992e+02, percent-clipped=2.0 +2023-04-26 23:40:13,721 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.0802, 2.3973, 2.1826, 2.3767, 2.1259, 2.3239, 2.4069, 2.2914], + device='cuda:0'), covar=tensor([0.4632, 0.6733, 0.6182, 0.5315, 0.6277, 0.8681, 0.6431, 0.5918], + device='cuda:0'), in_proj_covar=tensor([0.0324, 0.0382, 0.0316, 0.0325, 0.0339, 0.0402, 0.0360, 0.0323], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 23:40:22,481 INFO [finetune.py:976] (0/7) Epoch 10, batch 800, loss[loss=0.1689, simple_loss=0.2372, pruned_loss=0.05027, over 4851.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2618, pruned_loss=0.06405, over 942183.10 frames. ], batch size: 44, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:40:27,730 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. 
limit=2.0 +2023-04-26 23:40:37,965 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52373.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:40:54,334 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52396.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:40:56,135 INFO [finetune.py:976] (0/7) Epoch 10, batch 850, loss[loss=0.1854, simple_loss=0.243, pruned_loss=0.06383, over 4851.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2589, pruned_loss=0.06306, over 947310.96 frames. ], batch size: 44, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:40:58,776 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52403.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:41:30,358 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5903, 1.3841, 4.3055, 4.0736, 3.7117, 3.9827, 3.9078, 3.7051], + device='cuda:0'), covar=tensor([0.6850, 0.5926, 0.0945, 0.1422, 0.1018, 0.1670, 0.1592, 0.1555], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0308, 0.0407, 0.0410, 0.0349, 0.0405, 0.0314, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:41:30,886 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.587e+02 1.831e+02 2.127e+02 6.660e+02, threshold=3.662e+02, percent-clipped=3.0 +2023-04-26 23:41:33,554 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-26 23:41:33,602 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-26 23:41:41,245 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-26 23:41:44,731 INFO [finetune.py:976] (0/7) Epoch 10, batch 900, loss[loss=0.1464, simple_loss=0.2219, pruned_loss=0.03542, over 4855.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2563, pruned_loss=0.06261, over 949195.40 frames. ], batch size: 44, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:41:51,261 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=52451.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:42:39,323 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.34 vs. limit=5.0 +2023-04-26 23:42:48,729 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9061, 2.6270, 2.0625, 2.4017, 1.7024, 2.0723, 2.1310, 1.5666], + device='cuda:0'), covar=tensor([0.2267, 0.1289, 0.0877, 0.1333, 0.3372, 0.1334, 0.1983, 0.3181], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0314, 0.0226, 0.0287, 0.0316, 0.0271, 0.0256, 0.0278], + device='cuda:0'), out_proj_covar=tensor([1.1901e-04, 1.2666e-04, 9.0749e-05, 1.1522e-04, 1.2934e-04, 1.0904e-04, + 1.0460e-04, 1.1162e-04], device='cuda:0') +2023-04-26 23:42:49,241 INFO [finetune.py:976] (0/7) Epoch 10, batch 950, loss[loss=0.191, simple_loss=0.2582, pruned_loss=0.06186, over 4927.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2557, pruned_loss=0.06273, over 951496.91 frames. ], batch size: 38, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:43:00,207 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-04-26 23:43:41,296 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.114e+02 1.638e+02 1.935e+02 2.436e+02 4.596e+02, threshold=3.871e+02, percent-clipped=3.0 +2023-04-26 23:43:55,593 INFO [finetune.py:976] (0/7) Epoch 10, batch 1000, loss[loss=0.1742, simple_loss=0.2603, pruned_loss=0.04401, over 4914.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2588, pruned_loss=0.06373, over 953729.75 frames. ], batch size: 36, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:43:55,669 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52549.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:44:05,053 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52555.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 23:44:07,100 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-26 23:44:15,373 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.63 vs. limit=5.0 +2023-04-26 23:44:28,282 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-26 23:44:58,593 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9346, 1.2739, 1.5714, 2.3876, 2.4785, 1.8222, 1.4965, 1.9389], + device='cuda:0'), covar=tensor([0.0947, 0.1979, 0.1092, 0.0585, 0.0572, 0.1019, 0.1057, 0.0764], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0204, 0.0183, 0.0175, 0.0177, 0.0188, 0.0160, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:45:07,086 INFO [finetune.py:976] (0/7) Epoch 10, batch 1050, loss[loss=0.2061, simple_loss=0.2801, pruned_loss=0.06607, over 4912.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2607, pruned_loss=0.06393, over 953678.57 frames. ], batch size: 37, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:45:08,118 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-26 23:45:23,401 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52616.0, num_to_drop=1, layers_to_drop={3} +2023-04-26 23:45:52,966 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.267e+02 1.739e+02 2.075e+02 2.346e+02 3.891e+02, threshold=4.149e+02, percent-clipped=1.0 +2023-04-26 23:46:13,538 INFO [finetune.py:976] (0/7) Epoch 10, batch 1100, loss[loss=0.2237, simple_loss=0.2866, pruned_loss=0.08042, over 4891.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.261, pruned_loss=0.06386, over 955401.59 frames. ], batch size: 43, lr: 3.76e-03, grad_scale: 16.0 +2023-04-26 23:46:36,540 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52668.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:46:52,298 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. 
limit=2.0 +2023-04-26 23:46:55,074 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6280, 2.2929, 1.7323, 1.5718, 1.1692, 1.1897, 1.7781, 1.1409], + device='cuda:0'), covar=tensor([0.1779, 0.1541, 0.1810, 0.2124, 0.2705, 0.2336, 0.1168, 0.2353], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0216, 0.0170, 0.0204, 0.0204, 0.0184, 0.0160, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 23:46:55,646 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52696.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:46:57,844 INFO [finetune.py:976] (0/7) Epoch 10, batch 1150, loss[loss=0.1766, simple_loss=0.2441, pruned_loss=0.05451, over 4881.00 frames. ], tot_loss[loss=0.1961, simple_loss=0.2625, pruned_loss=0.06484, over 954673.53 frames. ], batch size: 35, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:46:58,112 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-26 23:47:19,013 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-26 23:47:21,724 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.582e+02 1.906e+02 2.303e+02 4.186e+02, threshold=3.812e+02, percent-clipped=1.0 +2023-04-26 23:47:33,675 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=52744.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:47:43,257 INFO [finetune.py:976] (0/7) Epoch 10, batch 1200, loss[loss=0.1718, simple_loss=0.2405, pruned_loss=0.05157, over 4744.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.2613, pruned_loss=0.06454, over 956110.35 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:47:45,175 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=52752.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:47:54,416 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9000, 1.0627, 1.5395, 1.6731, 1.5873, 1.7703, 1.6028, 1.5452], + device='cuda:0'), covar=tensor([0.4061, 0.5906, 0.5196, 0.4980, 0.6177, 0.8597, 0.5540, 0.5857], + device='cuda:0'), in_proj_covar=tensor([0.0325, 0.0381, 0.0316, 0.0325, 0.0340, 0.0401, 0.0360, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 23:48:19,654 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1289, 1.6025, 2.0566, 2.6509, 1.9799, 1.5943, 1.5022, 1.8736], + device='cuda:0'), covar=tensor([0.3381, 0.3661, 0.1772, 0.2287, 0.2859, 0.2948, 0.4530, 0.2299], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0250, 0.0219, 0.0317, 0.0213, 0.0227, 0.0233, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 23:48:48,805 INFO [finetune.py:976] (0/7) Epoch 10, batch 1250, loss[loss=0.1704, simple_loss=0.2392, pruned_loss=0.05081, over 4786.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.259, pruned_loss=0.06393, over 956348.98 frames. 
], batch size: 29, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:49:09,938 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=52813.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:49:12,340 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4378, 3.6297, 0.8996, 1.7865, 1.8854, 2.4356, 1.9746, 1.0252], + device='cuda:0'), covar=tensor([0.1539, 0.0954, 0.2113, 0.1428, 0.1154, 0.1134, 0.1554, 0.2073], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0248, 0.0141, 0.0122, 0.0135, 0.0154, 0.0119, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 23:49:34,636 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.609e+02 1.926e+02 2.217e+02 3.154e+02, threshold=3.851e+02, percent-clipped=0.0 +2023-04-26 23:49:48,760 INFO [finetune.py:976] (0/7) Epoch 10, batch 1300, loss[loss=0.1841, simple_loss=0.2504, pruned_loss=0.05892, over 4801.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2543, pruned_loss=0.06158, over 957174.80 frames. ], batch size: 45, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:49:48,870 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52849.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:49:49,571 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-26 23:50:20,538 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=52897.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:50:21,745 INFO [finetune.py:976] (0/7) Epoch 10, batch 1350, loss[loss=0.2134, simple_loss=0.2875, pruned_loss=0.06971, over 4825.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2552, pruned_loss=0.06183, over 957433.32 frames. ], batch size: 40, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:50:31,131 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=52911.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 23:50:44,368 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3773, 2.4280, 1.9359, 2.1143, 2.4636, 1.9590, 3.3261, 1.7510], + device='cuda:0'), covar=tensor([0.4005, 0.2133, 0.4825, 0.3019, 0.1872, 0.2618, 0.1386, 0.4561], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0348, 0.0428, 0.0362, 0.0387, 0.0383, 0.0379, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:50:46,658 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.765e+02 2.062e+02 2.587e+02 4.153e+02, threshold=4.125e+02, percent-clipped=1.0 +2023-04-26 23:50:55,066 INFO [finetune.py:976] (0/7) Epoch 10, batch 1400, loss[loss=0.1287, simple_loss=0.2048, pruned_loss=0.02632, over 4747.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.258, pruned_loss=0.06321, over 956821.91 frames. ], batch size: 23, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:50:58,842 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-26 23:51:09,502 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=52968.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:51:10,292 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. 
limit=2.0 +2023-04-26 23:51:25,779 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.5667, 3.4681, 2.5634, 4.1052, 3.5859, 3.5364, 1.6261, 3.5176], + device='cuda:0'), covar=tensor([0.1757, 0.1387, 0.3372, 0.1717, 0.2692, 0.1821, 0.5392, 0.2311], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0217, 0.0248, 0.0302, 0.0299, 0.0249, 0.0268, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 23:51:28,732 INFO [finetune.py:976] (0/7) Epoch 10, batch 1450, loss[loss=0.2141, simple_loss=0.2836, pruned_loss=0.07225, over 4818.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2614, pruned_loss=0.06415, over 955905.57 frames. ], batch size: 33, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:51:31,449 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9873, 1.5905, 1.7998, 2.1474, 1.8617, 1.5017, 1.3380, 1.7019], + device='cuda:0'), covar=tensor([0.2715, 0.3375, 0.1573, 0.2126, 0.2551, 0.2568, 0.4441, 0.2169], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0252, 0.0220, 0.0319, 0.0215, 0.0229, 0.0234, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 23:51:41,184 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=53016.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:52:04,980 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.250e+02 1.692e+02 2.005e+02 2.440e+02 4.945e+02, threshold=4.009e+02, percent-clipped=1.0 +2023-04-26 23:52:18,928 INFO [finetune.py:976] (0/7) Epoch 10, batch 1500, loss[loss=0.1681, simple_loss=0.2384, pruned_loss=0.04894, over 4754.00 frames. ], tot_loss[loss=0.1962, simple_loss=0.263, pruned_loss=0.06469, over 955743.89 frames. ], batch size: 26, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:52:28,785 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53056.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:53:19,946 INFO [finetune.py:976] (0/7) Epoch 10, batch 1550, loss[loss=0.2248, simple_loss=0.2918, pruned_loss=0.07887, over 4891.00 frames. ], tot_loss[loss=0.1955, simple_loss=0.2623, pruned_loss=0.06436, over 956118.22 frames. ], batch size: 36, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:53:25,471 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53108.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:53:32,441 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53117.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:53:45,210 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.713e+02 2.006e+02 2.452e+02 5.414e+02, threshold=4.013e+02, percent-clipped=2.0 +2023-04-26 23:53:53,127 INFO [finetune.py:976] (0/7) Epoch 10, batch 1600, loss[loss=0.1764, simple_loss=0.234, pruned_loss=0.05942, over 4849.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2594, pruned_loss=0.0634, over 957570.98 frames. ], batch size: 44, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:54:44,152 INFO [finetune.py:976] (0/7) Epoch 10, batch 1650, loss[loss=0.1994, simple_loss=0.265, pruned_loss=0.06692, over 4755.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2562, pruned_loss=0.0624, over 955766.11 frames. 
], batch size: 27, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:54:47,347 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4505, 1.6943, 1.8110, 1.9933, 1.7512, 1.8940, 1.9070, 1.8311], + device='cuda:0'), covar=tensor([0.5433, 0.7774, 0.6232, 0.5690, 0.7244, 1.0181, 0.7351, 0.6789], + device='cuda:0'), in_proj_covar=tensor([0.0328, 0.0384, 0.0318, 0.0327, 0.0342, 0.0404, 0.0363, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-26 23:54:49,098 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1670, 1.4386, 5.2401, 4.9481, 4.5701, 4.9630, 4.5890, 4.7176], + device='cuda:0'), covar=tensor([0.6156, 0.5979, 0.0863, 0.1345, 0.0948, 0.1436, 0.1270, 0.1359], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0305, 0.0404, 0.0407, 0.0348, 0.0404, 0.0313, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:54:52,159 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53211.0, num_to_drop=1, layers_to_drop={2} +2023-04-26 23:55:09,622 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.636e+02 1.864e+02 2.338e+02 5.656e+02, threshold=3.728e+02, percent-clipped=1.0 +2023-04-26 23:55:12,823 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53241.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:55:17,567 INFO [finetune.py:976] (0/7) Epoch 10, batch 1700, loss[loss=0.1528, simple_loss=0.2189, pruned_loss=0.04334, over 4834.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2533, pruned_loss=0.06117, over 955242.27 frames. ], batch size: 33, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:55:24,100 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=53259.0, num_to_drop=1, layers_to_drop={1} +2023-04-26 23:55:36,649 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7208, 4.1804, 0.6239, 2.2638, 2.2925, 2.6991, 2.5148, 0.9617], + device='cuda:0'), covar=tensor([0.1521, 0.1094, 0.2480, 0.1324, 0.1150, 0.1230, 0.1476, 0.2244], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0249, 0.0140, 0.0122, 0.0134, 0.0154, 0.0118, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-26 23:55:51,480 INFO [finetune.py:976] (0/7) Epoch 10, batch 1750, loss[loss=0.2189, simple_loss=0.2901, pruned_loss=0.0739, over 4841.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2558, pruned_loss=0.0618, over 955889.00 frames. ], batch size: 44, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:55:53,419 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53302.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:55:54,629 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53304.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:56:01,762 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53315.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:56:04,340 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-04-26 23:56:16,877 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.803e+02 2.178e+02 2.575e+02 5.782e+02, threshold=4.356e+02, percent-clipped=6.0 +2023-04-26 23:56:25,433 INFO [finetune.py:976] (0/7) Epoch 10, batch 1800, loss[loss=0.153, simple_loss=0.2335, pruned_loss=0.03621, over 4926.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2595, pruned_loss=0.06249, over 957432.24 frames. ], batch size: 42, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:56:35,668 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53365.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:56:36,895 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=53367.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:56:42,899 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53376.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:56:45,225 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0837, 2.7154, 1.9561, 2.0648, 1.4245, 1.4490, 2.2962, 1.3911], + device='cuda:0'), covar=tensor([0.1693, 0.1710, 0.1602, 0.1884, 0.2558, 0.2064, 0.1064, 0.2146], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0217, 0.0171, 0.0205, 0.0206, 0.0185, 0.0162, 0.0190], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 23:56:59,055 INFO [finetune.py:976] (0/7) Epoch 10, batch 1850, loss[loss=0.1917, simple_loss=0.2512, pruned_loss=0.06612, over 4885.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2614, pruned_loss=0.06361, over 955552.56 frames. ], batch size: 32, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:57:10,953 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53408.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:57:13,860 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53412.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:57:34,988 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=53428.0, num_to_drop=1, layers_to_drop={0} +2023-04-26 23:57:43,201 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4449, 1.8559, 2.2837, 2.8855, 2.2009, 1.7086, 1.6417, 2.1299], + device='cuda:0'), covar=tensor([0.3622, 0.3846, 0.1778, 0.2782, 0.3411, 0.3081, 0.4414, 0.2667], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0252, 0.0221, 0.0318, 0.0214, 0.0229, 0.0234, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-26 23:57:45,404 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.032e+02 1.670e+02 2.068e+02 2.536e+02 6.143e+02, threshold=4.136e+02, percent-clipped=4.0 +2023-04-26 23:58:05,924 INFO [finetune.py:976] (0/7) Epoch 10, batch 1900, loss[loss=0.1971, simple_loss=0.2712, pruned_loss=0.0615, over 4825.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2613, pruned_loss=0.06297, over 955932.75 frames. 
], batch size: 39, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:58:06,010 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8524, 1.2907, 4.9443, 4.5874, 4.3182, 4.7755, 4.4974, 4.4545], + device='cuda:0'), covar=tensor([0.6922, 0.6545, 0.1079, 0.2039, 0.1169, 0.1176, 0.1322, 0.1621], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0311, 0.0411, 0.0414, 0.0353, 0.0408, 0.0317, 0.0375], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-26 23:58:15,429 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=53456.0, num_to_drop=0, layers_to_drop=set() +2023-04-26 23:58:30,322 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5351, 1.2670, 0.5425, 1.2203, 1.4980, 1.3953, 1.3094, 1.3130], + device='cuda:0'), covar=tensor([0.0552, 0.0438, 0.0438, 0.0617, 0.0304, 0.0566, 0.0565, 0.0637], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0025, 0.0023, 0.0030, 0.0021, 0.0029, 0.0029, 0.0030], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0038, 0.0050, 0.0037, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-26 23:58:48,692 INFO [finetune.py:976] (0/7) Epoch 10, batch 1950, loss[loss=0.1645, simple_loss=0.2379, pruned_loss=0.04559, over 4806.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2594, pruned_loss=0.06162, over 956586.61 frames. ], batch size: 40, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:59:12,238 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.213e+02 1.722e+02 1.968e+02 2.257e+02 3.746e+02, threshold=3.936e+02, percent-clipped=0.0 +2023-04-26 23:59:22,127 INFO [finetune.py:976] (0/7) Epoch 10, batch 2000, loss[loss=0.1686, simple_loss=0.2442, pruned_loss=0.04649, over 4825.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2569, pruned_loss=0.06118, over 955098.68 frames. ], batch size: 25, lr: 3.75e-03, grad_scale: 16.0 +2023-04-26 23:59:22,883 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7003, 1.3205, 1.8299, 2.1905, 1.8435, 1.6528, 1.7651, 1.7373], + device='cuda:0'), covar=tensor([0.6220, 0.8625, 0.8285, 0.8337, 0.7594, 1.0550, 1.0724, 0.9951], + device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0414, 0.0499, 0.0517, 0.0436, 0.0455, 0.0466, 0.0465], + device='cuda:0'), out_proj_covar=tensor([9.9582e-05, 1.0253e-04, 1.1248e-04, 1.2297e-04, 1.0596e-04, 1.1012e-04, + 1.1189e-04, 1.1209e-04], device='cuda:0') +2023-04-26 23:59:24,671 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7555, 2.3990, 1.7807, 1.6631, 1.2495, 1.3192, 1.8118, 1.2643], + device='cuda:0'), covar=tensor([0.2032, 0.1627, 0.1659, 0.2031, 0.2740, 0.2343, 0.1258, 0.2388], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0217, 0.0171, 0.0205, 0.0205, 0.0185, 0.0162, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-26 23:59:29,713 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-27 00:00:04,126 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53597.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:00:05,764 INFO [finetune.py:976] (0/7) Epoch 10, batch 2050, loss[loss=0.2156, simple_loss=0.2703, pruned_loss=0.08044, over 4829.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2539, pruned_loss=0.06036, over 955132.81 frames. 
], batch size: 33, lr: 3.75e-03, grad_scale: 16.0 +2023-04-27 00:00:22,002 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.4238, 1.2920, 1.3996, 0.9934, 1.4066, 1.1248, 1.7589, 1.3541], + device='cuda:0'), covar=tensor([0.4036, 0.2005, 0.5659, 0.2798, 0.1679, 0.2471, 0.1689, 0.5029], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0345, 0.0428, 0.0360, 0.0386, 0.0380, 0.0376, 0.0418], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:00:34,508 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.698e+02 1.922e+02 2.404e+02 4.326e+02, threshold=3.844e+02, percent-clipped=2.0 +2023-04-27 00:00:36,491 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5355, 1.7583, 1.4133, 1.1747, 1.1818, 1.1349, 1.4040, 1.1525], + device='cuda:0'), covar=tensor([0.1776, 0.1387, 0.1533, 0.1872, 0.2494, 0.1953, 0.1140, 0.2091], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0215, 0.0170, 0.0204, 0.0204, 0.0184, 0.0161, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 00:00:44,012 INFO [finetune.py:976] (0/7) Epoch 10, batch 2100, loss[loss=0.2278, simple_loss=0.2802, pruned_loss=0.08771, over 4914.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2535, pruned_loss=0.0607, over 956502.94 frames. ], batch size: 36, lr: 3.75e-03, grad_scale: 32.0 +2023-04-27 00:00:51,818 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53660.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:00:52,518 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1495, 1.6013, 1.9840, 2.4429, 1.9334, 1.5377, 1.3368, 1.7899], + device='cuda:0'), covar=tensor([0.3770, 0.4012, 0.1985, 0.2781, 0.3389, 0.3157, 0.5022, 0.2620], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0250, 0.0219, 0.0315, 0.0213, 0.0227, 0.0234, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 00:00:58,521 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53671.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:01:16,443 INFO [finetune.py:976] (0/7) Epoch 10, batch 2150, loss[loss=0.2672, simple_loss=0.3174, pruned_loss=0.1085, over 4876.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2575, pruned_loss=0.06215, over 958083.60 frames. 
], batch size: 31, lr: 3.75e-03, grad_scale: 32.0 +2023-04-27 00:01:26,019 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53712.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:01:26,682 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1973, 1.6919, 2.0308, 2.4005, 1.9673, 1.6283, 1.2002, 1.7017], + device='cuda:0'), covar=tensor([0.3756, 0.3772, 0.1853, 0.2607, 0.3153, 0.2885, 0.4996, 0.2791], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0250, 0.0219, 0.0315, 0.0213, 0.0227, 0.0233, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 00:01:32,721 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=53723.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 00:01:38,785 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3827, 3.0078, 0.9610, 1.6546, 2.4058, 1.5846, 4.2520, 2.1549], + device='cuda:0'), covar=tensor([0.0628, 0.0833, 0.0909, 0.1312, 0.0517, 0.0963, 0.0186, 0.0589], + device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0067, 0.0050, 0.0047, 0.0051, 0.0052, 0.0079, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 00:01:41,147 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.745e+02 2.178e+02 2.516e+02 3.729e+02, threshold=4.356e+02, percent-clipped=0.0 +2023-04-27 00:01:49,681 INFO [finetune.py:976] (0/7) Epoch 10, batch 2200, loss[loss=0.1881, simple_loss=0.2594, pruned_loss=0.05844, over 4748.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2601, pruned_loss=0.06281, over 958258.54 frames. ], batch size: 54, lr: 3.75e-03, grad_scale: 32.0 +2023-04-27 00:01:52,222 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5907, 1.4420, 2.0025, 1.9788, 1.4757, 1.2650, 1.5445, 1.0806], + device='cuda:0'), covar=tensor([0.0703, 0.1082, 0.0551, 0.0874, 0.0947, 0.1469, 0.0905, 0.0933], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0073, 0.0071, 0.0067, 0.0076, 0.0096, 0.0077, 0.0073], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:01:57,878 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=53760.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:02:22,060 INFO [finetune.py:976] (0/7) Epoch 10, batch 2250, loss[loss=0.1648, simple_loss=0.2464, pruned_loss=0.04158, over 4861.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2606, pruned_loss=0.06277, over 958053.22 frames. ], batch size: 31, lr: 3.75e-03, grad_scale: 32.0 +2023-04-27 00:02:46,629 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.602e+02 1.955e+02 2.365e+02 4.712e+02, threshold=3.910e+02, percent-clipped=2.0 +2023-04-27 00:03:06,366 INFO [finetune.py:976] (0/7) Epoch 10, batch 2300, loss[loss=0.1893, simple_loss=0.2552, pruned_loss=0.06166, over 4910.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2606, pruned_loss=0.06199, over 958881.18 frames. ], batch size: 36, lr: 3.75e-03, grad_scale: 32.0 +2023-04-27 00:03:51,398 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-04-27 00:04:05,037 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53897.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:04:11,997 INFO [finetune.py:976] (0/7) Epoch 10, batch 2350, loss[loss=0.1634, simple_loss=0.2305, pruned_loss=0.04816, over 4932.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2586, pruned_loss=0.06162, over 957096.86 frames. ], batch size: 33, lr: 3.75e-03, grad_scale: 32.0 +2023-04-27 00:04:36,292 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2276, 1.4268, 1.6135, 1.7904, 1.6767, 1.8228, 1.7464, 1.6597], + device='cuda:0'), covar=tensor([0.4319, 0.5343, 0.4997, 0.4802, 0.5827, 0.8585, 0.5225, 0.5438], + device='cuda:0'), in_proj_covar=tensor([0.0324, 0.0381, 0.0314, 0.0324, 0.0338, 0.0401, 0.0359, 0.0323], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 00:04:55,900 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2309, 2.8459, 1.0042, 1.7449, 1.7678, 2.1884, 1.8363, 1.0991], + device='cuda:0'), covar=tensor([0.1304, 0.0948, 0.1647, 0.1123, 0.0926, 0.0882, 0.1258, 0.1872], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0250, 0.0140, 0.0123, 0.0134, 0.0154, 0.0119, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:04:57,666 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.640e+02 1.984e+02 2.431e+02 6.591e+02, threshold=3.969e+02, percent-clipped=4.0 +2023-04-27 00:05:08,691 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=53945.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:05:16,070 INFO [finetune.py:976] (0/7) Epoch 10, batch 2400, loss[loss=0.1742, simple_loss=0.2392, pruned_loss=0.05464, over 4901.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2561, pruned_loss=0.06127, over 955560.81 frames. ], batch size: 32, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:05:21,555 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5688, 1.9725, 1.7835, 1.8660, 1.6288, 1.8077, 1.6532, 1.3578], + device='cuda:0'), covar=tensor([0.2038, 0.1295, 0.0775, 0.1385, 0.2898, 0.1094, 0.1872, 0.2357], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0318, 0.0228, 0.0289, 0.0314, 0.0270, 0.0258, 0.0277], + device='cuda:0'), out_proj_covar=tensor([1.1867e-04, 1.2807e-04, 9.1594e-05, 1.1592e-04, 1.2865e-04, 1.0875e-04, + 1.0539e-04, 1.1124e-04], device='cuda:0') +2023-04-27 00:05:23,326 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53960.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:05:32,002 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=53971.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:05:49,429 INFO [finetune.py:976] (0/7) Epoch 10, batch 2450, loss[loss=0.1966, simple_loss=0.265, pruned_loss=0.06415, over 4826.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2531, pruned_loss=0.0607, over 956074.50 frames. ], batch size: 39, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:05:50,166 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-54000.pt +2023-04-27 00:05:52,059 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. 
limit=2.0 +2023-04-27 00:05:56,235 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=54008.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:05:58,430 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4966, 1.3924, 3.9346, 3.6668, 3.4746, 3.5997, 3.5982, 3.4842], + device='cuda:0'), covar=tensor([0.6842, 0.5344, 0.1099, 0.1829, 0.1181, 0.1893, 0.2650, 0.1539], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0311, 0.0408, 0.0412, 0.0351, 0.0409, 0.0316, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:06:04,941 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=54019.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:06:07,906 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=54023.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 00:06:13,971 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4944, 3.0301, 1.1582, 1.8592, 1.7893, 2.4174, 1.8899, 1.2497], + device='cuda:0'), covar=tensor([0.1167, 0.0830, 0.1625, 0.1121, 0.0992, 0.0777, 0.1400, 0.1852], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0250, 0.0140, 0.0123, 0.0135, 0.0154, 0.0119, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:06:15,671 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.726e+02 2.083e+02 2.546e+02 5.806e+02, threshold=4.166e+02, percent-clipped=1.0 +2023-04-27 00:06:24,053 INFO [finetune.py:976] (0/7) Epoch 10, batch 2500, loss[loss=0.2095, simple_loss=0.2815, pruned_loss=0.06878, over 4871.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2557, pruned_loss=0.06228, over 954610.27 frames. ], batch size: 34, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:06:31,398 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7872, 2.4621, 1.8240, 1.7099, 1.2823, 1.3169, 1.9170, 1.2727], + device='cuda:0'), covar=tensor([0.1720, 0.1345, 0.1559, 0.1912, 0.2440, 0.2136, 0.1136, 0.2112], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0215, 0.0170, 0.0205, 0.0204, 0.0185, 0.0161, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 00:06:39,514 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=54071.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:06:48,275 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-27 00:06:57,702 INFO [finetune.py:976] (0/7) Epoch 10, batch 2550, loss[loss=0.1597, simple_loss=0.2172, pruned_loss=0.05111, over 4146.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2602, pruned_loss=0.06341, over 955392.25 frames. 
], batch size: 18, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:06:57,819 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6993, 2.1943, 1.9111, 2.0462, 1.5607, 1.8677, 1.8269, 1.4701], + device='cuda:0'), covar=tensor([0.2497, 0.1455, 0.0872, 0.1416, 0.3563, 0.1355, 0.2262, 0.2790], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0317, 0.0228, 0.0288, 0.0314, 0.0270, 0.0259, 0.0277], + device='cuda:0'), out_proj_covar=tensor([1.1872e-04, 1.2760e-04, 9.1509e-05, 1.1556e-04, 1.2848e-04, 1.0868e-04, + 1.0575e-04, 1.1111e-04], device='cuda:0') +2023-04-27 00:06:59,057 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9897, 2.0012, 1.6672, 1.6968, 2.1920, 1.7004, 2.6853, 1.5537], + device='cuda:0'), covar=tensor([0.4406, 0.2147, 0.5581, 0.3503, 0.2099, 0.2771, 0.1683, 0.4877], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0348, 0.0432, 0.0362, 0.0387, 0.0384, 0.0380, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:06:59,681 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6118, 2.1247, 1.6272, 1.4240, 1.1818, 1.2305, 1.7000, 1.1598], + device='cuda:0'), covar=tensor([0.1804, 0.1544, 0.1594, 0.2032, 0.2599, 0.2074, 0.1124, 0.2214], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0215, 0.0170, 0.0205, 0.0204, 0.0185, 0.0161, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 00:07:15,635 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.33 vs. limit=5.0 +2023-04-27 00:07:22,641 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.598e+02 1.963e+02 2.419e+02 4.837e+02, threshold=3.926e+02, percent-clipped=1.0 +2023-04-27 00:07:30,643 INFO [finetune.py:976] (0/7) Epoch 10, batch 2600, loss[loss=0.1885, simple_loss=0.2577, pruned_loss=0.05959, over 4886.00 frames. ], tot_loss[loss=0.1956, simple_loss=0.2627, pruned_loss=0.06421, over 955003.42 frames. ], batch size: 32, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:08:04,395 INFO [finetune.py:976] (0/7) Epoch 10, batch 2650, loss[loss=0.1705, simple_loss=0.2469, pruned_loss=0.04702, over 4892.00 frames. ], tot_loss[loss=0.195, simple_loss=0.2624, pruned_loss=0.06376, over 955235.39 frames. ], batch size: 43, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:08:39,722 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.638e+02 1.968e+02 2.302e+02 4.272e+02, threshold=3.936e+02, percent-clipped=1.0 +2023-04-27 00:08:49,379 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6468, 3.6770, 0.8443, 1.8946, 2.0302, 2.5188, 2.0257, 1.0273], + device='cuda:0'), covar=tensor([0.1432, 0.0989, 0.2244, 0.1455, 0.1064, 0.1104, 0.1746, 0.2115], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0251, 0.0141, 0.0123, 0.0136, 0.0155, 0.0120, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:08:52,969 INFO [finetune.py:976] (0/7) Epoch 10, batch 2700, loss[loss=0.1854, simple_loss=0.2481, pruned_loss=0.06134, over 4759.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2617, pruned_loss=0.0634, over 956743.71 frames. 
], batch size: 27, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:09:46,295 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3951, 1.3577, 4.0620, 3.8162, 3.5660, 3.8018, 3.8073, 3.6059], + device='cuda:0'), covar=tensor([0.6896, 0.5553, 0.1146, 0.1645, 0.1098, 0.1746, 0.1468, 0.1435], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0308, 0.0405, 0.0407, 0.0348, 0.0405, 0.0314, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:10:03,551 INFO [finetune.py:976] (0/7) Epoch 10, batch 2750, loss[loss=0.192, simple_loss=0.2497, pruned_loss=0.06718, over 4833.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2597, pruned_loss=0.06325, over 956537.43 frames. ], batch size: 33, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:10:49,706 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.579e+02 1.855e+02 2.381e+02 3.900e+02, threshold=3.711e+02, percent-clipped=0.0 +2023-04-27 00:10:58,937 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7847, 2.6360, 2.9552, 3.1861, 3.1238, 2.3938, 2.2170, 2.9446], + device='cuda:0'), covar=tensor([0.0904, 0.0962, 0.0527, 0.0577, 0.0645, 0.1018, 0.0831, 0.0554], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0203, 0.0182, 0.0175, 0.0178, 0.0189, 0.0160, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:10:58,963 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3623, 3.1045, 2.3440, 2.2128, 1.6748, 1.6434, 2.4753, 1.7439], + device='cuda:0'), covar=tensor([0.1625, 0.1459, 0.1510, 0.1879, 0.2377, 0.1998, 0.1121, 0.1981], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0215, 0.0170, 0.0204, 0.0204, 0.0184, 0.0160, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 00:11:09,559 INFO [finetune.py:976] (0/7) Epoch 10, batch 2800, loss[loss=0.1689, simple_loss=0.2382, pruned_loss=0.04973, over 4749.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2569, pruned_loss=0.06221, over 956860.04 frames. ], batch size: 27, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:11:48,130 INFO [finetune.py:976] (0/7) Epoch 10, batch 2850, loss[loss=0.2231, simple_loss=0.2714, pruned_loss=0.08738, over 4133.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2552, pruned_loss=0.06172, over 954844.18 frames. ], batch size: 65, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:11:59,440 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-27 00:12:11,872 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.761e+02 2.016e+02 2.397e+02 4.207e+02, threshold=4.033e+02, percent-clipped=3.0 +2023-04-27 00:12:15,488 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5212, 0.9933, 1.5176, 1.9830, 1.6163, 1.5135, 1.5430, 1.5501], + device='cuda:0'), covar=tensor([0.5837, 0.7959, 0.8183, 0.7938, 0.7090, 0.9301, 0.9121, 0.9378], + device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0415, 0.0499, 0.0518, 0.0437, 0.0456, 0.0467, 0.0465], + device='cuda:0'), out_proj_covar=tensor([9.9482e-05, 1.0284e-04, 1.1247e-04, 1.2316e-04, 1.0619e-04, 1.1028e-04, + 1.1198e-04, 1.1203e-04], device='cuda:0') +2023-04-27 00:12:21,810 INFO [finetune.py:976] (0/7) Epoch 10, batch 2900, loss[loss=0.2285, simple_loss=0.3034, pruned_loss=0.07684, over 4836.00 frames. 
], tot_loss[loss=0.1923, simple_loss=0.2585, pruned_loss=0.06302, over 954845.14 frames. ], batch size: 47, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:12:28,667 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3323, 3.0246, 1.1006, 1.6927, 1.6275, 2.2457, 1.7798, 1.1752], + device='cuda:0'), covar=tensor([0.1647, 0.1352, 0.1908, 0.1699, 0.1257, 0.1133, 0.1679, 0.2289], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0249, 0.0140, 0.0123, 0.0135, 0.0154, 0.0119, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:12:41,612 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.71 vs. limit=5.0 +2023-04-27 00:12:55,782 INFO [finetune.py:976] (0/7) Epoch 10, batch 2950, loss[loss=0.1727, simple_loss=0.2423, pruned_loss=0.05153, over 4778.00 frames. ], tot_loss[loss=0.1946, simple_loss=0.2614, pruned_loss=0.06392, over 955167.85 frames. ], batch size: 26, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:13:19,230 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.620e+02 2.110e+02 2.463e+02 5.874e+02, threshold=4.221e+02, percent-clipped=2.0 +2023-04-27 00:13:21,673 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2468, 1.5049, 1.6174, 1.7679, 1.6255, 1.7449, 1.7780, 1.6958], + device='cuda:0'), covar=tensor([0.5124, 0.6158, 0.5427, 0.5141, 0.6325, 0.8723, 0.5751, 0.5939], + device='cuda:0'), in_proj_covar=tensor([0.0322, 0.0377, 0.0312, 0.0322, 0.0335, 0.0397, 0.0355, 0.0320], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 00:13:29,124 INFO [finetune.py:976] (0/7) Epoch 10, batch 3000, loss[loss=0.1896, simple_loss=0.262, pruned_loss=0.05857, over 4895.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2617, pruned_loss=0.06403, over 953842.68 frames. ], batch size: 36, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:13:29,125 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 00:13:36,777 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7426, 2.1331, 1.7862, 1.9850, 1.6190, 1.7996, 1.8078, 1.4554], + device='cuda:0'), covar=tensor([0.1683, 0.1079, 0.0863, 0.1043, 0.3166, 0.1035, 0.1665, 0.2345], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0317, 0.0227, 0.0287, 0.0314, 0.0269, 0.0257, 0.0277], + device='cuda:0'), out_proj_covar=tensor([1.1843e-04, 1.2758e-04, 9.1263e-05, 1.1513e-04, 1.2847e-04, 1.0836e-04, + 1.0488e-04, 1.1115e-04], device='cuda:0') +2023-04-27 00:13:45,199 INFO [finetune.py:1010] (0/7) Epoch 10, validation: loss=0.1531, simple_loss=0.2257, pruned_loss=0.04026, over 2265189.00 frames. +2023-04-27 00:13:45,200 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-27 00:14:08,953 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-27 00:14:32,351 INFO [finetune.py:976] (0/7) Epoch 10, batch 3050, loss[loss=0.1708, simple_loss=0.2413, pruned_loss=0.05017, over 4899.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2607, pruned_loss=0.06306, over 954342.41 frames. 
], batch size: 35, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:14:57,003 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.192e+02 1.553e+02 1.895e+02 2.251e+02 3.622e+02, threshold=3.789e+02, percent-clipped=0.0 +2023-04-27 00:14:59,065 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-27 00:15:05,018 INFO [finetune.py:976] (0/7) Epoch 10, batch 3100, loss[loss=0.2163, simple_loss=0.2763, pruned_loss=0.07819, over 4780.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.259, pruned_loss=0.06238, over 951921.80 frames. ], batch size: 51, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:15:27,240 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54664.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 00:15:57,295 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6817, 1.4063, 0.6437, 1.2936, 1.4350, 1.5422, 1.4213, 1.3427], + device='cuda:0'), covar=tensor([0.0497, 0.0394, 0.0392, 0.0561, 0.0283, 0.0502, 0.0493, 0.0580], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0026, 0.0023, 0.0030, 0.0021, 0.0029, 0.0029, 0.0030], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-27 00:16:02,134 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6969, 1.4393, 1.8885, 2.1314, 1.8544, 1.7307, 1.8088, 1.8091], + device='cuda:0'), covar=tensor([0.6469, 0.8619, 0.8815, 0.8854, 0.7936, 1.0325, 1.1291, 1.0429], + device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0415, 0.0499, 0.0519, 0.0439, 0.0457, 0.0468, 0.0466], + device='cuda:0'), out_proj_covar=tensor([9.9770e-05, 1.0291e-04, 1.1265e-04, 1.2335e-04, 1.0647e-04, 1.1050e-04, + 1.1217e-04, 1.1236e-04], device='cuda:0') +2023-04-27 00:16:11,741 INFO [finetune.py:976] (0/7) Epoch 10, batch 3150, loss[loss=0.1631, simple_loss=0.2366, pruned_loss=0.04479, over 4935.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2558, pruned_loss=0.06114, over 952254.86 frames. ], batch size: 33, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:16:23,201 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3351, 3.2055, 2.4601, 3.8693, 3.3535, 3.3735, 1.4580, 3.2723], + device='cuda:0'), covar=tensor([0.1710, 0.1379, 0.3444, 0.2216, 0.2963, 0.1746, 0.5438, 0.2424], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0214, 0.0247, 0.0302, 0.0298, 0.0248, 0.0267, 0.0268], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 00:16:53,616 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54725.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 00:17:04,899 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.617e+02 1.915e+02 2.289e+02 4.542e+02, threshold=3.830e+02, percent-clipped=1.0 +2023-04-27 00:17:25,055 INFO [finetune.py:976] (0/7) Epoch 10, batch 3200, loss[loss=0.1511, simple_loss=0.2159, pruned_loss=0.04312, over 4714.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2535, pruned_loss=0.06046, over 953611.22 frames. ], batch size: 23, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:18:08,050 INFO [finetune.py:976] (0/7) Epoch 10, batch 3250, loss[loss=0.2, simple_loss=0.2618, pruned_loss=0.06908, over 4739.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2536, pruned_loss=0.06122, over 952038.00 frames. 
], batch size: 27, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:18:33,642 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.703e+02 2.055e+02 2.409e+02 4.051e+02, threshold=4.111e+02, percent-clipped=1.0 +2023-04-27 00:18:38,576 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.00 vs. limit=5.0 +2023-04-27 00:18:42,051 INFO [finetune.py:976] (0/7) Epoch 10, batch 3300, loss[loss=0.2011, simple_loss=0.2781, pruned_loss=0.06207, over 4817.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2575, pruned_loss=0.06334, over 952089.98 frames. ], batch size: 40, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:18:50,580 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=54862.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:19:15,377 INFO [finetune.py:976] (0/7) Epoch 10, batch 3350, loss[loss=0.2157, simple_loss=0.2728, pruned_loss=0.07929, over 4787.00 frames. ], tot_loss[loss=0.1928, simple_loss=0.2593, pruned_loss=0.06316, over 952215.82 frames. ], batch size: 51, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:19:29,999 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 00:19:30,550 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=54923.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 00:19:39,809 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.272e+02 1.802e+02 2.053e+02 2.510e+02 4.625e+02, threshold=4.107e+02, percent-clipped=1.0 +2023-04-27 00:19:47,702 INFO [finetune.py:976] (0/7) Epoch 10, batch 3400, loss[loss=0.2073, simple_loss=0.2745, pruned_loss=0.07003, over 4900.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2601, pruned_loss=0.06352, over 953288.60 frames. ], batch size: 36, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:20:20,498 INFO [finetune.py:976] (0/7) Epoch 10, batch 3450, loss[loss=0.1909, simple_loss=0.2577, pruned_loss=0.06211, over 4907.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2591, pruned_loss=0.06243, over 952776.93 frames. ], batch size: 37, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:20:33,863 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55020.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 00:20:45,488 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.637e+02 2.017e+02 2.373e+02 5.913e+02, threshold=4.034e+02, percent-clipped=2.0 +2023-04-27 00:20:53,437 INFO [finetune.py:976] (0/7) Epoch 10, batch 3500, loss[loss=0.1779, simple_loss=0.2437, pruned_loss=0.05605, over 4771.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2571, pruned_loss=0.06214, over 953563.84 frames. ], batch size: 28, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:21:46,851 INFO [finetune.py:976] (0/7) Epoch 10, batch 3550, loss[loss=0.2037, simple_loss=0.2514, pruned_loss=0.07801, over 4797.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2539, pruned_loss=0.06115, over 955064.39 frames. 
], batch size: 25, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:21:48,182 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9905, 1.7030, 2.0796, 2.3616, 1.7809, 1.5132, 1.8026, 1.0478], + device='cuda:0'), covar=tensor([0.0609, 0.0932, 0.0573, 0.0704, 0.0815, 0.1208, 0.0921, 0.0986], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0073, 0.0071, 0.0067, 0.0076, 0.0096, 0.0077, 0.0073], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:22:27,074 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.344e+01 1.599e+02 1.898e+02 2.251e+02 4.553e+02, threshold=3.796e+02, percent-clipped=2.0 +2023-04-27 00:22:35,898 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55148.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:22:36,396 INFO [finetune.py:976] (0/7) Epoch 10, batch 3600, loss[loss=0.1589, simple_loss=0.2237, pruned_loss=0.04704, over 4762.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2519, pruned_loss=0.06054, over 956351.75 frames. ], batch size: 26, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:23:26,778 INFO [finetune.py:976] (0/7) Epoch 10, batch 3650, loss[loss=0.2444, simple_loss=0.2995, pruned_loss=0.09465, over 4915.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2563, pruned_loss=0.06317, over 954727.72 frames. ], batch size: 36, lr: 3.74e-03, grad_scale: 32.0 +2023-04-27 00:23:33,111 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55209.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:23:34,360 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55211.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:23:39,097 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55218.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 00:23:42,765 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7628, 1.4923, 1.9328, 2.0719, 1.5534, 1.2892, 1.5705, 1.0274], + device='cuda:0'), covar=tensor([0.0463, 0.0676, 0.0432, 0.0531, 0.0710, 0.1252, 0.0654, 0.0813], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0072, 0.0071, 0.0067, 0.0075, 0.0096, 0.0077, 0.0072], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:23:50,725 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.786e+02 2.133e+02 2.599e+02 5.918e+02, threshold=4.267e+02, percent-clipped=5.0 +2023-04-27 00:24:00,573 INFO [finetune.py:976] (0/7) Epoch 10, batch 3700, loss[loss=0.2332, simple_loss=0.2988, pruned_loss=0.0838, over 4881.00 frames. ], tot_loss[loss=0.1923, simple_loss=0.2583, pruned_loss=0.06315, over 954577.64 frames. ], batch size: 32, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:24:15,038 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55272.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:24:33,930 INFO [finetune.py:976] (0/7) Epoch 10, batch 3750, loss[loss=0.2552, simple_loss=0.3067, pruned_loss=0.1019, over 4732.00 frames. ], tot_loss[loss=0.1929, simple_loss=0.2597, pruned_loss=0.06305, over 954944.83 frames. 
], batch size: 54, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:24:47,383 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55320.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 00:24:57,142 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.679e+02 1.930e+02 2.224e+02 3.496e+02, threshold=3.860e+02, percent-clipped=0.0 +2023-04-27 00:25:07,102 INFO [finetune.py:976] (0/7) Epoch 10, batch 3800, loss[loss=0.18, simple_loss=0.2535, pruned_loss=0.05325, over 4914.00 frames. ], tot_loss[loss=0.1948, simple_loss=0.2616, pruned_loss=0.06407, over 956116.40 frames. ], batch size: 33, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:25:19,724 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=55368.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 00:25:40,048 INFO [finetune.py:976] (0/7) Epoch 10, batch 3850, loss[loss=0.1604, simple_loss=0.2202, pruned_loss=0.05029, over 4814.00 frames. ], tot_loss[loss=0.192, simple_loss=0.259, pruned_loss=0.0625, over 956077.55 frames. ], batch size: 25, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:25:52,106 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8214, 1.4658, 1.4048, 1.6430, 2.0815, 1.6885, 1.4280, 1.4071], + device='cuda:0'), covar=tensor([0.1636, 0.1625, 0.1930, 0.1492, 0.0921, 0.1843, 0.2037, 0.1835], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0321, 0.0354, 0.0298, 0.0335, 0.0319, 0.0306, 0.0359], + device='cuda:0'), out_proj_covar=tensor([6.4171e-05, 6.8032e-05, 7.6431e-05, 6.1364e-05, 7.0077e-05, 6.8283e-05, + 6.5568e-05, 7.7135e-05], device='cuda:0') +2023-04-27 00:26:04,612 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.147e+02 1.602e+02 1.915e+02 2.260e+02 3.579e+02, threshold=3.830e+02, percent-clipped=0.0 +2023-04-27 00:26:12,957 INFO [finetune.py:976] (0/7) Epoch 10, batch 3900, loss[loss=0.2214, simple_loss=0.2844, pruned_loss=0.07924, over 4828.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2558, pruned_loss=0.06145, over 953739.89 frames. ], batch size: 40, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:26:24,636 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8974, 1.6340, 2.0520, 2.3064, 2.0056, 1.7803, 1.9043, 1.9157], + device='cuda:0'), covar=tensor([0.6369, 0.9356, 0.9991, 0.8839, 0.7928, 1.1321, 1.1789, 1.1033], + device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0414, 0.0497, 0.0517, 0.0437, 0.0456, 0.0467, 0.0464], + device='cuda:0'), out_proj_covar=tensor([9.9496e-05, 1.0251e-04, 1.1228e-04, 1.2272e-04, 1.0596e-04, 1.1016e-04, + 1.1187e-04, 1.1171e-04], device='cuda:0') +2023-04-27 00:26:25,232 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0095, 1.7890, 2.0888, 2.4250, 2.4998, 1.9599, 1.5433, 2.1490], + device='cuda:0'), covar=tensor([0.0954, 0.1181, 0.0690, 0.0590, 0.0629, 0.0914, 0.0900, 0.0633], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0205, 0.0183, 0.0176, 0.0179, 0.0189, 0.0161, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:27:07,759 INFO [finetune.py:976] (0/7) Epoch 10, batch 3950, loss[loss=0.1486, simple_loss=0.216, pruned_loss=0.04065, over 4703.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2523, pruned_loss=0.06024, over 955186.26 frames. 
], batch size: 23, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:27:15,644 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55504.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:27:36,359 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55518.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 00:27:47,671 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.461e+01 1.659e+02 2.039e+02 2.496e+02 7.335e+02, threshold=4.077e+02, percent-clipped=3.0 +2023-04-27 00:27:56,582 INFO [finetune.py:976] (0/7) Epoch 10, batch 4000, loss[loss=0.1571, simple_loss=0.2346, pruned_loss=0.03985, over 4791.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2518, pruned_loss=0.0602, over 955819.17 frames. ], batch size: 29, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:28:09,058 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=55566.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:28:12,139 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=55567.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:28:43,369 INFO [finetune.py:976] (0/7) Epoch 10, batch 4050, loss[loss=0.1845, simple_loss=0.2624, pruned_loss=0.05333, over 4821.00 frames. ], tot_loss[loss=0.189, simple_loss=0.255, pruned_loss=0.06153, over 955085.15 frames. ], batch size: 39, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:29:09,491 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.009e+02 1.669e+02 1.991e+02 2.505e+02 4.319e+02, threshold=3.981e+02, percent-clipped=1.0 +2023-04-27 00:29:15,159 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9657, 1.8236, 2.1719, 2.4282, 2.0121, 1.8545, 2.0494, 1.9950], + device='cuda:0'), covar=tensor([0.6159, 0.8246, 0.9295, 0.8418, 0.7706, 1.1085, 1.0971, 1.0258], + device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0412, 0.0498, 0.0517, 0.0438, 0.0457, 0.0467, 0.0465], + device='cuda:0'), out_proj_covar=tensor([9.9666e-05, 1.0221e-04, 1.1250e-04, 1.2282e-04, 1.0615e-04, 1.1043e-04, + 1.1193e-04, 1.1191e-04], device='cuda:0') +2023-04-27 00:29:16,837 INFO [finetune.py:976] (0/7) Epoch 10, batch 4100, loss[loss=0.2188, simple_loss=0.2951, pruned_loss=0.07131, over 4832.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.259, pruned_loss=0.06261, over 954327.78 frames. ], batch size: 49, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:29:39,557 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0 +2023-04-27 00:29:50,669 INFO [finetune.py:976] (0/7) Epoch 10, batch 4150, loss[loss=0.1685, simple_loss=0.2471, pruned_loss=0.045, over 4899.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.2601, pruned_loss=0.06335, over 950478.14 frames. 
], batch size: 36, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:29:52,002 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8227, 1.8580, 1.0176, 1.4117, 2.2794, 1.6385, 1.5738, 1.5865], + device='cuda:0'), covar=tensor([0.0496, 0.0360, 0.0335, 0.0560, 0.0239, 0.0538, 0.0501, 0.0582], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0025, 0.0023, 0.0030, 0.0020, 0.0029, 0.0029, 0.0030], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0037, 0.0050, 0.0037, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-27 00:30:16,063 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.755e+02 2.031e+02 2.318e+02 3.746e+02, threshold=4.063e+02, percent-clipped=0.0 +2023-04-27 00:30:23,256 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55748.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:30:23,766 INFO [finetune.py:976] (0/7) Epoch 10, batch 4200, loss[loss=0.1895, simple_loss=0.2686, pruned_loss=0.05523, over 4817.00 frames. ], tot_loss[loss=0.1936, simple_loss=0.2603, pruned_loss=0.06347, over 949821.20 frames. ], batch size: 39, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:30:43,236 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1956, 1.7156, 2.1204, 2.4002, 2.0869, 1.6568, 1.2134, 1.8554], + device='cuda:0'), covar=tensor([0.3472, 0.3478, 0.1751, 0.2603, 0.2905, 0.2764, 0.4827, 0.2078], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0250, 0.0220, 0.0317, 0.0214, 0.0228, 0.0234, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 00:30:57,481 INFO [finetune.py:976] (0/7) Epoch 10, batch 4250, loss[loss=0.1732, simple_loss=0.2404, pruned_loss=0.05304, over 4832.00 frames. ], tot_loss[loss=0.1925, simple_loss=0.2585, pruned_loss=0.06327, over 951917.08 frames. ], batch size: 39, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:31:00,745 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55804.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:31:04,329 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55809.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:31:23,548 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.518e+01 1.564e+02 1.887e+02 2.474e+02 5.983e+02, threshold=3.773e+02, percent-clipped=1.0 +2023-04-27 00:31:30,790 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=55848.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:31:31,265 INFO [finetune.py:976] (0/7) Epoch 10, batch 4300, loss[loss=0.1881, simple_loss=0.2598, pruned_loss=0.05821, over 4713.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.256, pruned_loss=0.0622, over 954540.57 frames. ], batch size: 23, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:31:33,045 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=55852.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:31:48,275 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=55867.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:31:59,388 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. 
limit=5.0 +2023-04-27 00:32:31,213 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9659, 1.5582, 1.3576, 1.9576, 2.0994, 1.8063, 1.7568, 1.3553], + device='cuda:0'), covar=tensor([0.1715, 0.1903, 0.2125, 0.1712, 0.1243, 0.2208, 0.2067, 0.2006], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0319, 0.0352, 0.0297, 0.0335, 0.0318, 0.0304, 0.0356], + device='cuda:0'), out_proj_covar=tensor([6.3966e-05, 6.7562e-05, 7.6034e-05, 6.1255e-05, 7.0146e-05, 6.8113e-05, + 6.5159e-05, 7.6430e-05], device='cuda:0') +2023-04-27 00:32:31,700 INFO [finetune.py:976] (0/7) Epoch 10, batch 4350, loss[loss=0.1858, simple_loss=0.2439, pruned_loss=0.06392, over 4222.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2527, pruned_loss=0.06101, over 954877.32 frames. ], batch size: 65, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:32:43,356 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=55909.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:32:46,920 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=55915.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:32:56,467 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4250, 1.2504, 1.6337, 1.5656, 1.3431, 1.1950, 1.2716, 0.8256], + device='cuda:0'), covar=tensor([0.0621, 0.0801, 0.0522, 0.0774, 0.0856, 0.1397, 0.0645, 0.0831], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0073, 0.0071, 0.0068, 0.0076, 0.0096, 0.0078, 0.0073], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:33:00,522 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.07 vs. limit=5.0 +2023-04-27 00:33:02,707 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.042e+02 1.769e+02 2.030e+02 2.376e+02 3.891e+02, threshold=4.060e+02, percent-clipped=2.0 +2023-04-27 00:33:10,568 INFO [finetune.py:976] (0/7) Epoch 10, batch 4400, loss[loss=0.1949, simple_loss=0.2718, pruned_loss=0.05904, over 4922.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2525, pruned_loss=0.06028, over 954059.31 frames. ], batch size: 38, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:34:18,726 INFO [finetune.py:976] (0/7) Epoch 10, batch 4450, loss[loss=0.2177, simple_loss=0.2719, pruned_loss=0.08181, over 4715.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2577, pruned_loss=0.06198, over 954986.73 frames. ], batch size: 23, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:34:19,507 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-56000.pt +2023-04-27 00:34:57,416 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.646e+02 1.926e+02 2.335e+02 3.268e+02, threshold=3.851e+02, percent-clipped=0.0 +2023-04-27 00:35:05,273 INFO [finetune.py:976] (0/7) Epoch 10, batch 4500, loss[loss=0.1976, simple_loss=0.2605, pruned_loss=0.06733, over 4829.00 frames. ], tot_loss[loss=0.1924, simple_loss=0.2595, pruned_loss=0.06262, over 953860.03 frames. ], batch size: 30, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:35:38,739 INFO [finetune.py:976] (0/7) Epoch 10, batch 4550, loss[loss=0.1875, simple_loss=0.2537, pruned_loss=0.06071, over 4902.00 frames. ], tot_loss[loss=0.1941, simple_loss=0.2609, pruned_loss=0.06367, over 954508.06 frames. 
], batch size: 32, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:35:41,824 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56104.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:35:48,463 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9547, 2.8294, 2.2350, 2.4630, 1.9485, 2.3899, 2.4211, 1.7512], + device='cuda:0'), covar=tensor([0.2503, 0.1304, 0.0942, 0.1409, 0.3189, 0.1139, 0.2286, 0.2765], + device='cuda:0'), in_proj_covar=tensor([0.0300, 0.0321, 0.0232, 0.0293, 0.0320, 0.0274, 0.0260, 0.0283], + device='cuda:0'), out_proj_covar=tensor([1.2117e-04, 1.2935e-04, 9.3162e-05, 1.1750e-04, 1.3082e-04, 1.1009e-04, + 1.0583e-04, 1.1376e-04], device='cuda:0') +2023-04-27 00:35:56,278 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7477, 3.6670, 2.7574, 4.3003, 3.7110, 3.7429, 1.6765, 3.6208], + device='cuda:0'), covar=tensor([0.1895, 0.1183, 0.3472, 0.1875, 0.3478, 0.1888, 0.5644, 0.2375], + device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0213, 0.0245, 0.0300, 0.0296, 0.0248, 0.0265, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 00:36:03,340 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.622e+01 1.743e+02 2.073e+02 2.397e+02 5.279e+02, threshold=4.145e+02, percent-clipped=1.0 +2023-04-27 00:36:04,068 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6279, 1.6211, 0.6912, 1.3073, 1.6107, 1.4882, 1.4114, 1.3938], + device='cuda:0'), covar=tensor([0.0558, 0.0389, 0.0413, 0.0588, 0.0302, 0.0542, 0.0553, 0.0604], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0025, 0.0023, 0.0030, 0.0020, 0.0029, 0.0029, 0.0030], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0045, 0.0038, 0.0050, 0.0037, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-27 00:36:09,742 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56145.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:36:12,138 INFO [finetune.py:976] (0/7) Epoch 10, batch 4600, loss[loss=0.1589, simple_loss=0.2128, pruned_loss=0.05248, over 4052.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2601, pruned_loss=0.06322, over 952198.27 frames. ], batch size: 17, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:36:40,075 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3028, 1.2716, 3.8519, 3.5289, 3.4063, 3.6281, 3.7054, 3.4390], + device='cuda:0'), covar=tensor([0.6557, 0.5794, 0.1149, 0.2126, 0.1210, 0.1513, 0.1507, 0.1680], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0310, 0.0407, 0.0413, 0.0351, 0.0407, 0.0317, 0.0371], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:36:45,851 INFO [finetune.py:976] (0/7) Epoch 10, batch 4650, loss[loss=0.2057, simple_loss=0.2641, pruned_loss=0.07368, over 4884.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2575, pruned_loss=0.06273, over 953324.73 frames. 
], batch size: 32, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:36:49,598 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56204.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:36:50,861 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56206.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:36:57,303 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8207, 3.6317, 2.6550, 4.3484, 3.8164, 3.7792, 1.6298, 3.7770], + device='cuda:0'), covar=tensor([0.1741, 0.1245, 0.3340, 0.1676, 0.3156, 0.1749, 0.5576, 0.2099], + device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0214, 0.0246, 0.0302, 0.0297, 0.0249, 0.0265, 0.0267], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 00:37:16,885 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.791e+01 1.513e+02 1.802e+02 2.222e+02 6.871e+02, threshold=3.603e+02, percent-clipped=2.0 +2023-04-27 00:37:24,882 INFO [finetune.py:976] (0/7) Epoch 10, batch 4700, loss[loss=0.1744, simple_loss=0.2399, pruned_loss=0.05442, over 4733.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2531, pruned_loss=0.0607, over 952624.80 frames. ], batch size: 59, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:37:36,540 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-27 00:37:57,893 INFO [finetune.py:976] (0/7) Epoch 10, batch 4750, loss[loss=0.1356, simple_loss=0.1954, pruned_loss=0.03792, over 4826.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2504, pruned_loss=0.05999, over 951565.08 frames. ], batch size: 25, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:38:23,671 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.579e+02 1.869e+02 2.281e+02 6.054e+02, threshold=3.738e+02, percent-clipped=4.0 +2023-04-27 00:38:31,956 INFO [finetune.py:976] (0/7) Epoch 10, batch 4800, loss[loss=0.1843, simple_loss=0.2555, pruned_loss=0.05654, over 4927.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2545, pruned_loss=0.06185, over 952221.97 frames. ], batch size: 33, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:39:33,342 INFO [finetune.py:976] (0/7) Epoch 10, batch 4850, loss[loss=0.1739, simple_loss=0.2451, pruned_loss=0.05139, over 4843.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2589, pruned_loss=0.06358, over 952807.04 frames. ], batch size: 49, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:39:43,307 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56404.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:39:43,910 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3992, 3.3248, 1.0996, 1.7250, 1.6955, 2.4091, 1.8745, 1.0763], + device='cuda:0'), covar=tensor([0.1327, 0.0916, 0.1674, 0.1393, 0.1151, 0.0976, 0.1407, 0.2141], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0250, 0.0141, 0.0122, 0.0135, 0.0154, 0.0118, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:40:14,074 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.686e+02 2.043e+02 2.423e+02 5.639e+02, threshold=4.086e+02, percent-clipped=3.0 +2023-04-27 00:40:22,379 INFO [finetune.py:976] (0/7) Epoch 10, batch 4900, loss[loss=0.1667, simple_loss=0.2302, pruned_loss=0.05164, over 4770.00 frames. 
], tot_loss[loss=0.1967, simple_loss=0.2624, pruned_loss=0.06548, over 954167.82 frames. ], batch size: 26, lr: 3.73e-03, grad_scale: 32.0 +2023-04-27 00:40:22,451 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.5758, 4.4617, 3.0499, 5.2453, 4.6387, 4.4888, 2.1712, 4.4645], + device='cuda:0'), covar=tensor([0.1369, 0.0849, 0.3183, 0.0836, 0.2589, 0.1619, 0.5590, 0.2038], + device='cuda:0'), in_proj_covar=tensor([0.0239, 0.0213, 0.0245, 0.0300, 0.0294, 0.0246, 0.0264, 0.0265], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 00:40:24,257 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=56452.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:40:40,333 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 00:40:56,297 INFO [finetune.py:976] (0/7) Epoch 10, batch 4950, loss[loss=0.2391, simple_loss=0.2949, pruned_loss=0.0916, over 4930.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2625, pruned_loss=0.06515, over 955375.76 frames. ], batch size: 41, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:40:57,602 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56501.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:40:59,468 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56504.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:41:12,962 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56523.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 00:41:21,844 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.631e+02 2.001e+02 2.395e+02 7.277e+02, threshold=4.002e+02, percent-clipped=1.0 +2023-04-27 00:41:29,726 INFO [finetune.py:976] (0/7) Epoch 10, batch 5000, loss[loss=0.1757, simple_loss=0.2484, pruned_loss=0.05152, over 4818.00 frames. ], tot_loss[loss=0.1943, simple_loss=0.2606, pruned_loss=0.06399, over 956255.22 frames. 
], batch size: 38, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:41:32,060 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=56552.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:41:35,799 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9176, 1.4142, 1.5863, 1.7162, 2.1860, 1.7489, 1.4447, 1.4519], + device='cuda:0'), covar=tensor([0.2299, 0.2134, 0.2491, 0.2001, 0.1158, 0.2325, 0.2506, 0.2651], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0322, 0.0356, 0.0299, 0.0338, 0.0321, 0.0307, 0.0362], + device='cuda:0'), out_proj_covar=tensor([6.4473e-05, 6.8172e-05, 7.6760e-05, 6.1767e-05, 7.0738e-05, 6.8621e-05, + 6.5652e-05, 7.7755e-05], device='cuda:0') +2023-04-27 00:41:44,576 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4326, 1.3986, 4.1945, 3.9513, 3.7018, 3.9894, 4.0488, 3.7180], + device='cuda:0'), covar=tensor([0.6849, 0.5475, 0.1116, 0.1763, 0.1048, 0.1677, 0.1075, 0.1489], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0308, 0.0407, 0.0409, 0.0350, 0.0406, 0.0315, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:41:53,721 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56584.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 00:42:03,262 INFO [finetune.py:976] (0/7) Epoch 10, batch 5050, loss[loss=0.1727, simple_loss=0.2424, pruned_loss=0.05147, over 4825.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2564, pruned_loss=0.06175, over 958003.79 frames. ], batch size: 40, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:42:56,295 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.627e+02 1.925e+02 2.295e+02 4.200e+02, threshold=3.850e+02, percent-clipped=1.0 +2023-04-27 00:43:09,519 INFO [finetune.py:976] (0/7) Epoch 10, batch 5100, loss[loss=0.1845, simple_loss=0.2464, pruned_loss=0.06131, over 4128.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2522, pruned_loss=0.0603, over 955943.04 frames. ], batch size: 18, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:43:59,161 INFO [finetune.py:976] (0/7) Epoch 10, batch 5150, loss[loss=0.178, simple_loss=0.2487, pruned_loss=0.05361, over 4778.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2523, pruned_loss=0.06034, over 956067.93 frames. ], batch size: 28, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:43:59,406 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.57 vs. limit=5.0 +2023-04-27 00:44:13,159 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2122, 2.5432, 0.7614, 1.3949, 1.5391, 1.8976, 1.5521, 0.8138], + device='cuda:0'), covar=tensor([0.1542, 0.1220, 0.1942, 0.1478, 0.1134, 0.0987, 0.1695, 0.1835], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0250, 0.0142, 0.0123, 0.0135, 0.0155, 0.0119, 0.0123], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:44:15,046 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. 
limit=2.0 +2023-04-27 00:44:19,544 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8139, 1.0757, 3.2464, 3.0089, 2.9272, 3.1768, 3.1735, 2.8435], + device='cuda:0'), covar=tensor([0.7901, 0.6002, 0.1515, 0.2322, 0.1474, 0.2313, 0.2091, 0.2009], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0309, 0.0409, 0.0412, 0.0352, 0.0409, 0.0317, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:44:25,421 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.696e+02 2.008e+02 2.313e+02 3.981e+02, threshold=4.015e+02, percent-clipped=1.0 +2023-04-27 00:44:33,209 INFO [finetune.py:976] (0/7) Epoch 10, batch 5200, loss[loss=0.2034, simple_loss=0.2779, pruned_loss=0.06448, over 4808.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2576, pruned_loss=0.06203, over 953793.33 frames. ], batch size: 39, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:45:18,108 INFO [finetune.py:976] (0/7) Epoch 10, batch 5250, loss[loss=0.1531, simple_loss=0.2319, pruned_loss=0.03712, over 4765.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2585, pruned_loss=0.06226, over 952608.71 frames. ], batch size: 28, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:45:20,009 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=56801.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:45:21,235 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56803.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:45:33,851 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3544, 2.0584, 2.4576, 2.7259, 2.7777, 2.1922, 1.8566, 2.3635], + device='cuda:0'), covar=tensor([0.0734, 0.0927, 0.0474, 0.0464, 0.0510, 0.0761, 0.0799, 0.0531], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0202, 0.0181, 0.0174, 0.0177, 0.0187, 0.0159, 0.0183], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:45:42,568 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9092, 2.4453, 1.9158, 1.5762, 1.3764, 1.4034, 2.0065, 1.3524], + device='cuda:0'), covar=tensor([0.1783, 0.1490, 0.1667, 0.2171, 0.2643, 0.2251, 0.1094, 0.2197], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0214, 0.0169, 0.0203, 0.0204, 0.0185, 0.0159, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 00:45:43,652 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.715e+02 1.995e+02 2.714e+02 4.040e+02, threshold=3.989e+02, percent-clipped=1.0 +2023-04-27 00:45:50,967 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-27 00:45:51,393 INFO [finetune.py:976] (0/7) Epoch 10, batch 5300, loss[loss=0.1761, simple_loss=0.257, pruned_loss=0.04757, over 4842.00 frames. ], tot_loss[loss=0.1934, simple_loss=0.26, pruned_loss=0.06335, over 950524.58 frames. ], batch size: 49, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:45:51,451 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=56849.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:45:56,985 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-27 00:46:01,125 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56864.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:46:11,149 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=56879.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 00:46:25,221 INFO [finetune.py:976] (0/7) Epoch 10, batch 5350, loss[loss=0.1859, simple_loss=0.2515, pruned_loss=0.0602, over 4859.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2601, pruned_loss=0.06292, over 949981.67 frames. ], batch size: 47, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:46:31,353 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56908.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:46:34,530 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.15 vs. limit=5.0 +2023-04-27 00:46:50,603 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.631e+02 1.870e+02 2.324e+02 4.447e+02, threshold=3.741e+02, percent-clipped=2.0 +2023-04-27 00:46:58,344 INFO [finetune.py:976] (0/7) Epoch 10, batch 5400, loss[loss=0.1673, simple_loss=0.2486, pruned_loss=0.04304, over 4868.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2569, pruned_loss=0.06172, over 950014.54 frames. ], batch size: 34, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:47:11,568 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=56969.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:47:28,549 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56993.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:47:29,757 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=56995.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:47:32,094 INFO [finetune.py:976] (0/7) Epoch 10, batch 5450, loss[loss=0.1942, simple_loss=0.243, pruned_loss=0.07274, over 4808.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.254, pruned_loss=0.06075, over 950843.79 frames. ], batch size: 25, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:48:03,871 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.80 vs. limit=5.0 +2023-04-27 00:48:18,179 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.695e+02 1.974e+02 2.503e+02 6.150e+02, threshold=3.947e+02, percent-clipped=1.0 +2023-04-27 00:48:38,379 INFO [finetune.py:976] (0/7) Epoch 10, batch 5500, loss[loss=0.2144, simple_loss=0.2742, pruned_loss=0.07732, over 4778.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.251, pruned_loss=0.05979, over 952304.68 frames. ], batch size: 59, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:48:47,135 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57054.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:48:48,366 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57056.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:49:35,990 INFO [finetune.py:976] (0/7) Epoch 10, batch 5550, loss[loss=0.2587, simple_loss=0.315, pruned_loss=0.1012, over 4203.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.255, pruned_loss=0.06206, over 952651.95 frames. 
], batch size: 65, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:49:42,209 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0694, 0.7992, 0.9302, 0.7916, 1.2268, 0.9760, 0.8710, 0.9831], + device='cuda:0'), covar=tensor([0.1626, 0.1478, 0.2170, 0.1557, 0.0843, 0.1398, 0.1810, 0.1956], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0319, 0.0353, 0.0297, 0.0333, 0.0318, 0.0305, 0.0359], + device='cuda:0'), out_proj_covar=tensor([6.3980e-05, 6.7590e-05, 7.6172e-05, 6.1411e-05, 6.9647e-05, 6.8057e-05, + 6.5217e-05, 7.7190e-05], device='cuda:0') +2023-04-27 00:49:58,881 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9508, 4.2773, 0.8148, 2.1742, 2.3281, 2.8610, 2.4717, 0.8488], + device='cuda:0'), covar=tensor([0.1289, 0.0946, 0.2053, 0.1326, 0.1022, 0.1022, 0.1398, 0.2214], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0247, 0.0141, 0.0121, 0.0133, 0.0152, 0.0117, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:49:59,957 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.628e+02 1.818e+02 2.115e+02 5.040e+02, threshold=3.636e+02, percent-clipped=1.0 +2023-04-27 00:50:06,925 INFO [finetune.py:976] (0/7) Epoch 10, batch 5600, loss[loss=0.2334, simple_loss=0.3011, pruned_loss=0.08281, over 4841.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2586, pruned_loss=0.06277, over 952091.96 frames. ], batch size: 44, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:50:12,760 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57159.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:50:13,959 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1321, 1.4519, 1.2825, 1.6748, 1.5443, 1.8745, 1.3032, 3.5440], + device='cuda:0'), covar=tensor([0.0658, 0.0828, 0.0855, 0.1262, 0.0676, 0.0547, 0.0824, 0.0143], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0058], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 00:50:24,870 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57179.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 00:50:29,394 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57186.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:50:34,121 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-27 00:50:35,751 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57197.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:50:36,383 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9376, 2.5284, 1.8463, 1.9698, 1.3944, 1.3506, 2.1437, 1.3019], + device='cuda:0'), covar=tensor([0.1792, 0.1732, 0.1635, 0.1881, 0.2509, 0.2059, 0.1055, 0.2193], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0214, 0.0170, 0.0203, 0.0204, 0.0186, 0.0159, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 00:50:36,870 INFO [finetune.py:976] (0/7) Epoch 10, batch 5650, loss[loss=0.1709, simple_loss=0.2554, pruned_loss=0.04321, over 4802.00 frames. ], tot_loss[loss=0.1947, simple_loss=0.2622, pruned_loss=0.06366, over 954436.46 frames. 
], batch size: 41, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:50:39,491 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1278, 1.9244, 2.4102, 2.5376, 1.8695, 1.5650, 1.8749, 1.0469], + device='cuda:0'), covar=tensor([0.0645, 0.0960, 0.0553, 0.0888, 0.0885, 0.1480, 0.0967, 0.1126], + device='cuda:0'), in_proj_covar=tensor([0.0066, 0.0072, 0.0070, 0.0067, 0.0076, 0.0096, 0.0077, 0.0072], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 00:50:53,684 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=57227.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 00:50:59,545 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.003e+02 1.673e+02 1.923e+02 2.288e+02 6.250e+02, threshold=3.847e+02, percent-clipped=4.0 +2023-04-27 00:51:00,945 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.54 vs. limit=5.0 +2023-04-27 00:51:03,824 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57244.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:51:05,612 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57247.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:51:06,699 INFO [finetune.py:976] (0/7) Epoch 10, batch 5700, loss[loss=0.176, simple_loss=0.2348, pruned_loss=0.05863, over 4473.00 frames. ], tot_loss[loss=0.1937, simple_loss=0.2591, pruned_loss=0.06409, over 934206.09 frames. ], batch size: 19, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:51:06,798 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2738, 3.0329, 2.5149, 2.6980, 2.1771, 2.5442, 2.4979, 2.1124], + device='cuda:0'), covar=tensor([0.1807, 0.0862, 0.0724, 0.0997, 0.2777, 0.1081, 0.1459, 0.1967], + device='cuda:0'), in_proj_covar=tensor([0.0297, 0.0316, 0.0229, 0.0288, 0.0316, 0.0270, 0.0255, 0.0278], + device='cuda:0'), out_proj_covar=tensor([1.1977e-04, 1.2734e-04, 9.1786e-05, 1.1525e-04, 1.2941e-04, 1.0868e-04, + 1.0393e-04, 1.1153e-04], device='cuda:0') +2023-04-27 00:51:12,267 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57258.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 00:51:15,766 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57264.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:51:20,015 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0060, 1.8589, 2.0019, 2.1970, 2.2701, 1.8766, 1.5652, 2.0764], + device='cuda:0'), covar=tensor([0.0834, 0.1096, 0.0706, 0.0660, 0.0622, 0.0891, 0.0946, 0.0616], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0201, 0.0180, 0.0173, 0.0176, 0.0187, 0.0158, 0.0182], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:51:23,784 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-10.pt +2023-04-27 00:51:38,656 INFO [finetune.py:976] (0/7) Epoch 11, batch 0, loss[loss=0.2059, simple_loss=0.2721, pruned_loss=0.06989, over 4814.00 frames. ], tot_loss[loss=0.2059, simple_loss=0.2721, pruned_loss=0.06989, over 4814.00 frames. ], batch size: 33, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:51:38,657 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 00:51:55,310 INFO [finetune.py:1010] (0/7) Epoch 11, validation: loss=0.1558, simple_loss=0.2272, pruned_loss=0.04225, over 2265189.00 frames. 
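At this point in the log the trainer has just written the end-of-epoch checkpoint (epoch-10.pt) and then, at batch 0 of epoch 11, computed validation loss over the full dev set (2265189.00 frames) before training resumes. A minimal sketch of that epoch-boundary sequence, assuming a hypothetical compute_loss helper and generic model/optimizer/dataloader objects rather than the exact finetune.py and checkpoint.py code:

import torch

def end_of_epoch(model, optimizer, valid_dl, exp_dir, epoch):
    # Save "epoch-{N}.pt", as in "Saving checkpoint to .../epoch-10.pt".
    torch.save(
        {"model": model.state_dict(),
         "optimizer": optimizer.state_dict(),
         "epoch": epoch},
        f"{exp_dir}/epoch-{epoch}.pt",
    )
    # Compute validation loss over the whole dev set, as done at
    # batch 0 of the next epoch ("Computing validation loss").
    model.eval()
    tot_loss = tot_frames = 0.0
    with torch.no_grad():
        for batch in valid_dl:
            loss, num_frames = compute_loss(model, batch)  # hypothetical helper
            tot_loss += loss.item()
            tot_frames += num_frames
    model.train()
    return tot_loss / max(tot_frames, 1.0)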
+2023-04-27 00:51:55,311 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB +2023-04-27 00:52:28,820 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57305.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:52:34,739 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8409, 1.2425, 1.4202, 1.5447, 1.9900, 1.6241, 1.3355, 1.3892], + device='cuda:0'), covar=tensor([0.2049, 0.1836, 0.2567, 0.1607, 0.1076, 0.1754, 0.2418, 0.2290], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0321, 0.0356, 0.0300, 0.0335, 0.0320, 0.0307, 0.0362], + device='cuda:0'), out_proj_covar=tensor([6.4488e-05, 6.8053e-05, 7.6719e-05, 6.1878e-05, 7.0120e-05, 6.8374e-05, + 6.5712e-05, 7.7799e-05], device='cuda:0') +2023-04-27 00:52:42,962 INFO [finetune.py:976] (0/7) Epoch 11, batch 50, loss[loss=0.2028, simple_loss=0.277, pruned_loss=0.06432, over 4776.00 frames. ], tot_loss[loss=0.1964, simple_loss=0.2639, pruned_loss=0.06448, over 217279.03 frames. ], batch size: 29, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:52:49,895 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.652e+02 2.063e+02 2.429e+02 4.586e+02, threshold=4.127e+02, percent-clipped=3.0 +2023-04-27 00:52:57,812 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57349.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:52:59,008 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57351.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:53:20,600 INFO [finetune.py:976] (0/7) Epoch 11, batch 100, loss[loss=0.1573, simple_loss=0.227, pruned_loss=0.04383, over 4872.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2556, pruned_loss=0.06233, over 381037.33 frames. ], batch size: 34, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:53:34,833 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9157, 1.7693, 2.0184, 2.3136, 2.4391, 1.8823, 1.5858, 2.1133], + device='cuda:0'), covar=tensor([0.0899, 0.1124, 0.0648, 0.0632, 0.0550, 0.0833, 0.0922, 0.0547], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0201, 0.0180, 0.0173, 0.0176, 0.0186, 0.0158, 0.0182], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:53:40,243 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57398.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:54:10,100 INFO [finetune.py:976] (0/7) Epoch 11, batch 150, loss[loss=0.2215, simple_loss=0.2765, pruned_loss=0.08323, over 4902.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2513, pruned_loss=0.06061, over 508091.27 frames. ], batch size: 43, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:54:27,173 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.639e+02 1.968e+02 2.285e+02 5.137e+02, threshold=3.937e+02, percent-clipped=1.0 +2023-04-27 00:54:37,554 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-04-27 00:54:52,699 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:54:52,722 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57459.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:55:00,027 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57462.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:55:02,958 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5595, 1.2885, 4.2992, 4.0117, 3.7252, 4.0011, 3.8991, 3.7846], + device='cuda:0'), covar=tensor([0.7376, 0.6114, 0.1064, 0.1759, 0.1256, 0.1716, 0.2092, 0.1661], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0307, 0.0406, 0.0408, 0.0350, 0.0406, 0.0315, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:55:02,987 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6549, 1.6857, 0.8725, 1.3604, 1.9081, 1.5249, 1.4231, 1.4569], + device='cuda:0'), covar=tensor([0.0456, 0.0366, 0.0375, 0.0550, 0.0286, 0.0502, 0.0510, 0.0532], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0025, 0.0023, 0.0030, 0.0020, 0.0029, 0.0029, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0037, 0.0050, 0.0037, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-27 00:55:10,526 INFO [finetune.py:976] (0/7) Epoch 11, batch 200, loss[loss=0.1973, simple_loss=0.2727, pruned_loss=0.06094, over 4855.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2496, pruned_loss=0.06045, over 608001.89 frames. ], batch size: 44, lr: 3.72e-03, grad_scale: 32.0 +2023-04-27 00:55:18,960 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8804, 1.8695, 2.1022, 2.1599, 1.9855, 1.7189, 1.8755, 1.9740], + device='cuda:0'), covar=tensor([0.7534, 0.9940, 1.2099, 1.2115, 0.8920, 1.5731, 1.4331, 1.2666], + device='cuda:0'), in_proj_covar=tensor([0.0407, 0.0412, 0.0497, 0.0516, 0.0436, 0.0456, 0.0467, 0.0466], + device='cuda:0'), out_proj_covar=tensor([9.9191e-05, 1.0213e-04, 1.1216e-04, 1.2270e-04, 1.0566e-04, 1.1033e-04, + 1.1196e-04, 1.1214e-04], device='cuda:0') +2023-04-27 00:55:20,201 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.93 vs. limit=5.0 +2023-04-27 00:55:22,546 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57495.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:55:30,369 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=57507.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:55:40,649 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57523.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:55:42,940 INFO [finetune.py:976] (0/7) Epoch 11, batch 250, loss[loss=0.2117, simple_loss=0.2787, pruned_loss=0.07241, over 4822.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2553, pruned_loss=0.063, over 685356.97 frames. 
], batch size: 33, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 00:55:50,073 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.267e+02 1.758e+02 2.095e+02 2.670e+02 5.121e+02, threshold=4.190e+02, percent-clipped=8.0 +2023-04-27 00:55:52,333 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.0726, 3.8855, 2.8222, 4.6185, 3.9730, 4.0557, 1.7117, 3.9466], + device='cuda:0'), covar=tensor([0.1459, 0.1123, 0.3044, 0.1528, 0.4043, 0.1732, 0.5734, 0.2136], + device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0214, 0.0248, 0.0301, 0.0296, 0.0248, 0.0267, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 00:55:54,611 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57542.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:56:01,283 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57553.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 00:56:03,128 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57556.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:56:04,899 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9943, 1.9196, 2.2554, 2.5154, 2.0997, 1.9394, 2.0655, 2.0434], + device='cuda:0'), covar=tensor([0.6278, 0.8257, 0.9215, 0.7924, 0.7461, 1.0243, 1.0654, 1.0198], + device='cuda:0'), in_proj_covar=tensor([0.0407, 0.0412, 0.0497, 0.0516, 0.0437, 0.0457, 0.0467, 0.0466], + device='cuda:0'), out_proj_covar=tensor([9.9207e-05, 1.0217e-04, 1.1214e-04, 1.2278e-04, 1.0577e-04, 1.1042e-04, + 1.1193e-04, 1.1199e-04], device='cuda:0') +2023-04-27 00:56:07,254 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4638, 1.3761, 4.2521, 3.9273, 3.7502, 4.0325, 3.9951, 3.7669], + device='cuda:0'), covar=tensor([0.7125, 0.5976, 0.1259, 0.1960, 0.1297, 0.1758, 0.1452, 0.1566], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0305, 0.0405, 0.0408, 0.0349, 0.0405, 0.0314, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:56:08,499 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57564.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:56:16,735 INFO [finetune.py:976] (0/7) Epoch 11, batch 300, loss[loss=0.1367, simple_loss=0.2121, pruned_loss=0.03063, over 4680.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2577, pruned_loss=0.06278, over 746142.09 frames. ], batch size: 23, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 00:56:17,571 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.04 vs. limit=5.0 +2023-04-27 00:56:26,851 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3071, 3.2690, 2.3868, 3.8743, 3.2626, 3.3565, 1.5030, 3.2628], + device='cuda:0'), covar=tensor([0.1945, 0.1432, 0.3276, 0.2258, 0.3345, 0.1826, 0.5749, 0.2537], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0215, 0.0249, 0.0303, 0.0297, 0.0249, 0.0268, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 00:56:32,859 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57600.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:56:34,198 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-04-27 00:56:40,094 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=57612.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:56:50,090 INFO [finetune.py:976] (0/7) Epoch 11, batch 350, loss[loss=0.181, simple_loss=0.2543, pruned_loss=0.05387, over 4907.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2583, pruned_loss=0.06248, over 793140.35 frames. ], batch size: 37, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 00:56:56,642 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.613e+02 1.965e+02 2.419e+02 4.062e+02, threshold=3.929e+02, percent-clipped=0.0 +2023-04-27 00:57:11,403 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57649.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:57:13,072 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57651.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:57:46,783 INFO [finetune.py:976] (0/7) Epoch 11, batch 400, loss[loss=0.1673, simple_loss=0.235, pruned_loss=0.0498, over 4805.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2585, pruned_loss=0.06209, over 829210.89 frames. ], batch size: 55, lr: 3.71e-03, grad_scale: 64.0 +2023-04-27 00:58:06,580 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7510, 2.4316, 1.9914, 2.1490, 1.7674, 2.0074, 1.9133, 1.6446], + device='cuda:0'), covar=tensor([0.2295, 0.1267, 0.0886, 0.1433, 0.3500, 0.1235, 0.2135, 0.2483], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0315, 0.0228, 0.0285, 0.0314, 0.0267, 0.0253, 0.0276], + device='cuda:0'), out_proj_covar=tensor([1.1856e-04, 1.2659e-04, 9.1447e-05, 1.1417e-04, 1.2835e-04, 1.0747e-04, + 1.0324e-04, 1.1079e-04], device='cuda:0') +2023-04-27 00:58:10,646 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=57697.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:58:12,353 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=57699.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:58:27,230 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8755, 1.4163, 1.6468, 1.7184, 1.6062, 1.3405, 0.7582, 1.3512], + device='cuda:0'), covar=tensor([0.3502, 0.3443, 0.1783, 0.2305, 0.2683, 0.2765, 0.4413, 0.2318], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0248, 0.0220, 0.0317, 0.0213, 0.0227, 0.0232, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 00:58:30,757 INFO [finetune.py:976] (0/7) Epoch 11, batch 450, loss[loss=0.203, simple_loss=0.2654, pruned_loss=0.07027, over 4896.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2579, pruned_loss=0.062, over 856753.90 frames. ], batch size: 46, lr: 3.71e-03, grad_scale: 64.0 +2023-04-27 00:58:33,257 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. 
limit=2.0 +2023-04-27 00:58:34,981 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=57733.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:58:37,317 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.069e+02 1.604e+02 1.948e+02 2.335e+02 4.408e+02, threshold=3.896e+02, percent-clipped=1.0 +2023-04-27 00:59:00,416 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57754.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:59:32,060 INFO [finetune.py:976] (0/7) Epoch 11, batch 500, loss[loss=0.2178, simple_loss=0.2604, pruned_loss=0.08755, over 4855.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2559, pruned_loss=0.06161, over 879885.74 frames. ], batch size: 44, lr: 3.71e-03, grad_scale: 64.0 +2023-04-27 00:59:36,340 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0362, 1.5048, 1.8895, 2.0677, 1.8359, 1.4887, 1.0214, 1.5036], + device='cuda:0'), covar=tensor([0.3292, 0.3536, 0.1714, 0.2490, 0.2683, 0.2718, 0.4991, 0.2534], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0250, 0.0222, 0.0319, 0.0214, 0.0229, 0.0234, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 00:59:48,114 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1950, 2.0028, 2.2466, 2.5250, 2.6618, 2.0008, 1.7937, 2.3438], + device='cuda:0'), covar=tensor([0.0816, 0.0939, 0.0537, 0.0539, 0.0439, 0.0895, 0.0806, 0.0514], + device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0200, 0.0180, 0.0172, 0.0175, 0.0185, 0.0157, 0.0181], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 00:59:54,239 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=57794.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 00:59:59,051 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0754, 1.8725, 2.4182, 2.4164, 1.7513, 1.6306, 1.9171, 1.3684], + device='cuda:0'), covar=tensor([0.0687, 0.1028, 0.0548, 0.1032, 0.1089, 0.1359, 0.0947, 0.0926], + device='cuda:0'), in_proj_covar=tensor([0.0065, 0.0071, 0.0069, 0.0066, 0.0074, 0.0094, 0.0075, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 01:00:25,867 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57818.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:00:31,728 INFO [finetune.py:976] (0/7) Epoch 11, batch 550, loss[loss=0.1683, simple_loss=0.2371, pruned_loss=0.04978, over 4844.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2517, pruned_loss=0.06041, over 897356.31 frames. 
], batch size: 47, lr: 3.71e-03, grad_scale: 64.0 +2023-04-27 01:00:38,242 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.623e+02 1.943e+02 2.373e+02 5.716e+02, threshold=3.887e+02, percent-clipped=4.0 +2023-04-27 01:00:41,338 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57842.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:00:47,290 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=57851.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:00:49,050 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57853.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 01:00:52,577 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.6132, 4.5463, 3.1172, 5.3768, 4.6483, 4.6276, 2.0630, 4.6694], + device='cuda:0'), covar=tensor([0.1325, 0.0833, 0.2984, 0.0861, 0.3251, 0.1458, 0.5479, 0.1793], + device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0215, 0.0248, 0.0301, 0.0297, 0.0249, 0.0268, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 01:01:04,583 INFO [finetune.py:976] (0/7) Epoch 11, batch 600, loss[loss=0.1336, simple_loss=0.2054, pruned_loss=0.03089, over 4753.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2521, pruned_loss=0.0604, over 911350.67 frames. ], batch size: 27, lr: 3.71e-03, grad_scale: 64.0 +2023-04-27 01:01:13,407 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=57890.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:01:20,163 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=57900.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:01:20,734 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=57901.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:01:38,049 INFO [finetune.py:976] (0/7) Epoch 11, batch 650, loss[loss=0.2159, simple_loss=0.2819, pruned_loss=0.07492, over 4812.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.255, pruned_loss=0.06103, over 922094.10 frames. ], batch size: 25, lr: 3.71e-03, grad_scale: 64.0 +2023-04-27 01:01:45,068 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.147e+02 1.753e+02 2.161e+02 2.818e+02 6.431e+02, threshold=4.321e+02, percent-clipped=6.0 +2023-04-27 01:01:51,793 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=57948.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:01:57,773 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-27 01:02:11,760 INFO [finetune.py:976] (0/7) Epoch 11, batch 700, loss[loss=0.1818, simple_loss=0.2581, pruned_loss=0.05277, over 4894.00 frames. ], tot_loss[loss=0.1908, simple_loss=0.2576, pruned_loss=0.06199, over 928606.04 frames. ], batch size: 35, lr: 3.71e-03, grad_scale: 64.0 +2023-04-27 01:02:37,628 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-58000.pt +2023-04-27 01:02:57,544 INFO [finetune.py:976] (0/7) Epoch 11, batch 750, loss[loss=0.1737, simple_loss=0.2511, pruned_loss=0.04811, over 4893.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2591, pruned_loss=0.06257, over 934497.56 frames. 
], batch size: 43, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 01:03:04,227 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.766e+02 1.999e+02 2.458e+02 5.140e+02, threshold=3.998e+02, percent-clipped=2.0 +2023-04-27 01:03:14,988 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58054.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:03:31,278 INFO [finetune.py:976] (0/7) Epoch 11, batch 800, loss[loss=0.1933, simple_loss=0.266, pruned_loss=0.06033, over 4812.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2582, pruned_loss=0.06125, over 940139.77 frames. ], batch size: 40, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 01:03:38,600 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58089.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:03:47,431 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=58102.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:03:58,164 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58118.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:04:04,032 INFO [finetune.py:976] (0/7) Epoch 11, batch 850, loss[loss=0.1835, simple_loss=0.2543, pruned_loss=0.05635, over 4788.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2562, pruned_loss=0.06102, over 942623.76 frames. ], batch size: 29, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 01:04:09,574 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1346, 2.2871, 1.8976, 1.8944, 2.3244, 1.9031, 2.8707, 1.6588], + device='cuda:0'), covar=tensor([0.3909, 0.1999, 0.4771, 0.3036, 0.1774, 0.2590, 0.1352, 0.4505], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0346, 0.0426, 0.0358, 0.0385, 0.0382, 0.0379, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 01:04:10,688 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.590e+02 1.973e+02 2.582e+02 4.894e+02, threshold=3.945e+02, percent-clipped=3.0 +2023-04-27 01:04:12,611 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58141.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:04:14,894 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58144.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:04:19,580 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58151.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:04:39,243 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=58166.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:04:52,830 INFO [finetune.py:976] (0/7) Epoch 11, batch 900, loss[loss=0.2023, simple_loss=0.2598, pruned_loss=0.07238, over 4836.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2533, pruned_loss=0.05989, over 947153.82 frames. 
], batch size: 30, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 01:05:19,781 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=58199.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:05:22,305 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58202.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:05:24,170 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58205.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:05:54,762 INFO [finetune.py:976] (0/7) Epoch 11, batch 950, loss[loss=0.2251, simple_loss=0.2946, pruned_loss=0.0778, over 4923.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2522, pruned_loss=0.05988, over 948627.65 frames. ], batch size: 42, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 01:06:10,247 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.752e+02 2.086e+02 2.428e+02 7.149e+02, threshold=4.172e+02, percent-clipped=3.0 +2023-04-27 01:06:47,387 INFO [finetune.py:976] (0/7) Epoch 11, batch 1000, loss[loss=0.2176, simple_loss=0.2858, pruned_loss=0.07472, over 4820.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2566, pruned_loss=0.06171, over 952472.88 frames. ], batch size: 40, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 01:07:20,925 INFO [finetune.py:976] (0/7) Epoch 11, batch 1050, loss[loss=0.1426, simple_loss=0.2151, pruned_loss=0.03502, over 4764.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2587, pruned_loss=0.06214, over 953507.13 frames. ], batch size: 28, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 01:07:28,201 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.762e+02 2.120e+02 2.435e+02 5.017e+02, threshold=4.240e+02, percent-clipped=2.0 +2023-04-27 01:07:38,129 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6456, 3.1661, 1.1461, 1.9246, 2.0212, 2.4068, 2.0039, 1.2676], + device='cuda:0'), covar=tensor([0.1127, 0.0911, 0.1585, 0.1066, 0.0799, 0.0879, 0.1355, 0.1524], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0247, 0.0139, 0.0121, 0.0133, 0.0152, 0.0117, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 01:07:44,742 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1746, 1.5159, 1.3535, 1.7525, 1.5846, 1.7871, 1.3927, 3.0953], + device='cuda:0'), covar=tensor([0.0687, 0.0845, 0.0880, 0.1211, 0.0680, 0.0496, 0.0804, 0.0193], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0058], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 01:07:52,945 INFO [finetune.py:976] (0/7) Epoch 11, batch 1100, loss[loss=0.236, simple_loss=0.2932, pruned_loss=0.08942, over 4921.00 frames. ], tot_loss[loss=0.1921, simple_loss=0.2596, pruned_loss=0.06228, over 953763.77 frames. ], batch size: 33, lr: 3.71e-03, grad_scale: 32.0 +2023-04-27 01:08:01,691 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58389.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 01:08:26,953 INFO [finetune.py:976] (0/7) Epoch 11, batch 1150, loss[loss=0.1763, simple_loss=0.2606, pruned_loss=0.04605, over 4828.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2612, pruned_loss=0.06269, over 955265.34 frames. 
], batch size: 49, lr: 3.71e-03, grad_scale: 32.0
+2023-04-27 01:08:34,041 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=58437.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:08:35,088 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.681e+02 2.024e+02 2.460e+02 8.000e+02, threshold=4.047e+02, percent-clipped=3.0
+2023-04-27 01:08:59,902 INFO [finetune.py:976] (0/7) Epoch 11, batch 1200, loss[loss=0.2047, simple_loss=0.2755, pruned_loss=0.067, over 4905.00 frames. ], tot_loss[loss=0.1914, simple_loss=0.2589, pruned_loss=0.06194, over 954741.04 frames. ], batch size: 36, lr: 3.71e-03, grad_scale: 32.0
+2023-04-27 01:09:13,879 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58497.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:09:15,680 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=58500.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:09:32,952 INFO [finetune.py:976] (0/7) Epoch 11, batch 1250, loss[loss=0.1701, simple_loss=0.2397, pruned_loss=0.05021, over 4772.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2564, pruned_loss=0.06168, over 953613.15 frames. ], batch size: 28, lr: 3.71e-03, grad_scale: 32.0
+2023-04-27 01:09:34,727 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3785, 1.4218, 4.2100, 3.9561, 3.7241, 3.9312, 3.9638, 3.7801],
+ device='cuda:0'), covar=tensor([0.7038, 0.5378, 0.1145, 0.1710, 0.1125, 0.1495, 0.1611, 0.1361],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0305, 0.0404, 0.0407, 0.0348, 0.0405, 0.0314, 0.0366],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 01:09:41,124 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.214e+02 1.641e+02 1.959e+02 2.303e+02 4.487e+02, threshold=3.918e+02, percent-clipped=1.0
+2023-04-27 01:10:19,350 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1811, 3.2100, 1.9994, 2.3561, 1.5480, 1.3950, 2.1504, 1.4108],
+ device='cuda:0'), covar=tensor([0.1794, 0.1231, 0.1529, 0.1656, 0.2488, 0.2111, 0.1150, 0.2161],
+ device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0214, 0.0168, 0.0202, 0.0202, 0.0185, 0.0158, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 01:10:22,918 INFO [finetune.py:976] (0/7) Epoch 11, batch 1300, loss[loss=0.1837, simple_loss=0.2439, pruned_loss=0.0618, over 4874.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2533, pruned_loss=0.06034, over 955705.68 frames. ], batch size: 31, lr: 3.71e-03, grad_scale: 32.0
+2023-04-27 01:10:43,379 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2317, 1.7163, 2.0456, 2.3822, 2.0349, 1.6085, 1.1711, 1.7628],
+ device='cuda:0'), covar=tensor([0.3695, 0.3786, 0.1959, 0.2492, 0.2885, 0.2981, 0.5067, 0.2506],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0249, 0.0221, 0.0317, 0.0214, 0.0228, 0.0233, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 01:10:53,677 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7780, 3.7115, 2.7365, 4.3106, 3.7567, 3.6982, 1.7386, 3.7796],
+ device='cuda:0'), covar=tensor([0.1723, 0.1299, 0.3463, 0.1749, 0.3574, 0.1981, 0.5749, 0.2205],
+ device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0216, 0.0248, 0.0303, 0.0298, 0.0250, 0.0268, 0.0270],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 01:10:55,529 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9947, 2.5792, 1.0486, 1.3191, 2.0528, 1.1728, 3.3365, 1.6729],
+ device='cuda:0'), covar=tensor([0.0684, 0.0668, 0.0809, 0.1210, 0.0453, 0.1042, 0.0242, 0.0580],
+ device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0066, 0.0049, 0.0047, 0.0051, 0.0052, 0.0078, 0.0052],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 01:11:06,104 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-04-27 01:11:29,575 INFO [finetune.py:976] (0/7) Epoch 11, batch 1350, loss[loss=0.2052, simple_loss=0.2681, pruned_loss=0.07112, over 4855.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2535, pruned_loss=0.0609, over 955974.52 frames. ], batch size: 49, lr: 3.71e-03, grad_scale: 32.0
+2023-04-27 01:11:46,674 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.586e+02 1.956e+02 2.328e+02 4.668e+02, threshold=3.913e+02, percent-clipped=1.0
+2023-04-27 01:11:46,880 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-04-27 01:12:01,162 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.0342, 3.9000, 2.8803, 4.6116, 4.0404, 3.9259, 1.7744, 3.9673],
+ device='cuda:0'), covar=tensor([0.1769, 0.1174, 0.3237, 0.1426, 0.4497, 0.2045, 0.6106, 0.2312],
+ device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0215, 0.0247, 0.0301, 0.0296, 0.0250, 0.0267, 0.0268],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 01:12:34,429 INFO [finetune.py:976] (0/7) Epoch 11, batch 1400, loss[loss=0.1918, simple_loss=0.2795, pruned_loss=0.05206, over 4807.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2572, pruned_loss=0.06184, over 955227.52 frames. ], batch size: 45, lr: 3.71e-03, grad_scale: 32.0
+2023-04-27 01:12:42,445 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58681.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:13:42,641 INFO [finetune.py:976] (0/7) Epoch 11, batch 1450, loss[loss=0.2171, simple_loss=0.2767, pruned_loss=0.07878, over 4824.00 frames. ], tot_loss[loss=0.192, simple_loss=0.2591, pruned_loss=0.06245, over 954148.67 frames. ], batch size: 30, lr: 3.71e-03, grad_scale: 32.0
+2023-04-27 01:14:01,639 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.919e+01 1.615e+02 1.986e+02 2.296e+02 3.609e+02, threshold=3.972e+02, percent-clipped=0.0
+2023-04-27 01:14:04,692 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58742.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:14:13,970 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. limit=2.0
+2023-04-27 01:14:28,707 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1904, 2.6426, 1.1116, 1.3558, 1.9109, 1.2463, 3.6321, 1.7547],
+ device='cuda:0'), covar=tensor([0.0639, 0.0686, 0.0793, 0.1290, 0.0527, 0.1030, 0.0248, 0.0644],
+ device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0067, 0.0050, 0.0047, 0.0051, 0.0053, 0.0078, 0.0052],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 01:14:37,114 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58775.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:14:38,276 INFO [finetune.py:976] (0/7) Epoch 11, batch 1500, loss[loss=0.1933, simple_loss=0.2722, pruned_loss=0.05718, over 4809.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.2589, pruned_loss=0.06201, over 953942.05 frames. ], batch size: 47, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:14:52,523 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58797.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:14:54,506 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=58800.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:14:59,676 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0241, 1.5147, 1.8971, 2.1832, 1.8045, 1.4774, 0.9607, 1.4642],
+ device='cuda:0'), covar=tensor([0.4100, 0.3952, 0.1962, 0.2606, 0.3021, 0.3013, 0.4975, 0.2764],
+ device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0250, 0.0223, 0.0318, 0.0215, 0.0228, 0.0233, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 01:15:02,715 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0
+2023-04-27 01:15:11,710 INFO [finetune.py:976] (0/7) Epoch 11, batch 1550, loss[loss=0.1611, simple_loss=0.2309, pruned_loss=0.04567, over 4926.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2591, pruned_loss=0.06141, over 954703.13 frames. ], batch size: 33, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:15:18,183 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58836.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:15:19,338 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.373e+01 1.631e+02 1.941e+02 2.305e+02 6.407e+02, threshold=3.882e+02, percent-clipped=1.0
+2023-04-27 01:15:24,172 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=58845.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:15:26,496 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=58848.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:15:44,971 INFO [finetune.py:976] (0/7) Epoch 11, batch 1600, loss[loss=0.1946, simple_loss=0.257, pruned_loss=0.06613, over 4790.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.258, pruned_loss=0.06145, over 953502.19 frames. ], batch size: 29, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:16:05,011 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=58906.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:16:18,093 INFO [finetune.py:976] (0/7) Epoch 11, batch 1650, loss[loss=0.1755, simple_loss=0.2379, pruned_loss=0.05658, over 4874.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.2561, pruned_loss=0.06111, over 954520.52 frames. ], batch size: 34, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:16:22,622 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0
+2023-04-27 01:16:25,722 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 1.662e+02 1.988e+02 2.360e+02 4.922e+02, threshold=3.975e+02, percent-clipped=3.0
+2023-04-27 01:16:35,831 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8507, 1.6758, 1.9000, 2.2137, 2.2796, 1.7077, 1.2895, 2.0114],
+ device='cuda:0'), covar=tensor([0.0801, 0.1114, 0.0794, 0.0553, 0.0525, 0.0858, 0.0953, 0.0521],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0203, 0.0182, 0.0175, 0.0178, 0.0188, 0.0160, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 01:17:07,505 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=58967.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:17:08,720 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1930, 1.5741, 1.3352, 1.7824, 1.6146, 2.0064, 1.3920, 3.5859],
+ device='cuda:0'), covar=tensor([0.0607, 0.0785, 0.0782, 0.1162, 0.0634, 0.0523, 0.0762, 0.0151],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0058],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 01:17:13,432 INFO [finetune.py:976] (0/7) Epoch 11, batch 1700, loss[loss=0.1799, simple_loss=0.2484, pruned_loss=0.05572, over 4911.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2536, pruned_loss=0.06029, over 956373.45 frames. ], batch size: 43, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:17:47,002 INFO [finetune.py:976] (0/7) Epoch 11, batch 1750, loss[loss=0.1781, simple_loss=0.2438, pruned_loss=0.05624, over 4777.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2562, pruned_loss=0.06152, over 956820.97 frames. ], batch size: 28, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:17:53,221 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59037.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:17:53,741 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.792e+02 2.172e+02 2.715e+02 5.550e+02, threshold=4.344e+02, percent-clipped=4.0
+2023-04-27 01:18:30,829 INFO [finetune.py:976] (0/7) Epoch 11, batch 1800, loss[loss=0.2016, simple_loss=0.2709, pruned_loss=0.06616, over 4749.00 frames. ], tot_loss[loss=0.1911, simple_loss=0.2587, pruned_loss=0.06173, over 957773.47 frames. ], batch size: 27, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:18:34,623 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1763, 1.5498, 1.3171, 1.7497, 1.5652, 1.9583, 1.4021, 3.4850],
+ device='cuda:0'), covar=tensor([0.0663, 0.0815, 0.0841, 0.1175, 0.0659, 0.0536, 0.0781, 0.0138],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0058],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 01:18:38,255 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1844, 1.7223, 1.5511, 2.0714, 1.8963, 2.0510, 1.6027, 4.3703],
+ device='cuda:0'), covar=tensor([0.0626, 0.0762, 0.0796, 0.1134, 0.0622, 0.0531, 0.0725, 0.0115],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0058],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 01:19:09,377 INFO [finetune.py:976] (0/7) Epoch 11, batch 1850, loss[loss=0.236, simple_loss=0.2866, pruned_loss=0.09271, over 4883.00 frames. ], tot_loss[loss=0.193, simple_loss=0.2609, pruned_loss=0.06258, over 957862.53 frames. ], batch size: 32, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:19:11,899 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59131.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:19:21,280 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.048e+02 1.699e+02 2.019e+02 2.381e+02 7.878e+02, threshold=4.039e+02, percent-clipped=1.0
+2023-04-27 01:19:52,227 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59164.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:20:10,994 INFO [finetune.py:976] (0/7) Epoch 11, batch 1900, loss[loss=0.2454, simple_loss=0.2959, pruned_loss=0.09747, over 4802.00 frames. ], tot_loss[loss=0.1952, simple_loss=0.263, pruned_loss=0.06365, over 958665.73 frames. ], batch size: 45, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:20:21,662 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9206, 2.7859, 2.2078, 2.4319, 1.9095, 2.3342, 2.4410, 1.5264],
+ device='cuda:0'), covar=tensor([0.2456, 0.1241, 0.0874, 0.1525, 0.3341, 0.1254, 0.2296, 0.3293],
+ device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0314, 0.0229, 0.0285, 0.0314, 0.0270, 0.0254, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([1.1911e-04, 1.2628e-04, 9.1613e-05, 1.1399e-04, 1.2839e-04, 1.0841e-04,
+ 1.0362e-04, 1.1031e-04], device='cuda:0')
+2023-04-27 01:21:00,114 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59225.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:21:01,213 INFO [finetune.py:976] (0/7) Epoch 11, batch 1950, loss[loss=0.1868, simple_loss=0.2503, pruned_loss=0.0617, over 4876.00 frames. ], tot_loss[loss=0.1935, simple_loss=0.2612, pruned_loss=0.06285, over 956362.31 frames. ], batch size: 31, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:21:08,362 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.114e+02 1.600e+02 1.889e+02 2.365e+02 4.581e+02, threshold=3.778e+02, percent-clipped=1.0
+2023-04-27 01:21:24,549 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59262.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:21:35,025 INFO [finetune.py:976] (0/7) Epoch 11, batch 2000, loss[loss=0.1614, simple_loss=0.2359, pruned_loss=0.04342, over 4915.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2572, pruned_loss=0.06114, over 956523.43 frames. ], batch size: 32, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:22:08,862 INFO [finetune.py:976] (0/7) Epoch 11, batch 2050, loss[loss=0.1624, simple_loss=0.2345, pruned_loss=0.04519, over 4802.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2537, pruned_loss=0.06007, over 957641.11 frames. ], batch size: 51, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:22:09,682 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-04-27 01:22:15,378 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59337.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:22:15,893 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.589e+02 2.008e+02 2.324e+02 3.763e+02, threshold=4.015e+02, percent-clipped=0.0
+2023-04-27 01:22:16,041 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8629, 1.4223, 1.4762, 1.5900, 2.0764, 1.6720, 1.4134, 1.4510],
+ device='cuda:0'), covar=tensor([0.1328, 0.1700, 0.1677, 0.1503, 0.0810, 0.1881, 0.2223, 0.1966],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0320, 0.0351, 0.0298, 0.0335, 0.0319, 0.0305, 0.0361],
+ device='cuda:0'), out_proj_covar=tensor([6.4342e-05, 6.7619e-05, 7.5369e-05, 6.1440e-05, 7.0062e-05, 6.8027e-05,
+ 6.5224e-05, 7.7488e-05], device='cuda:0')
+2023-04-27 01:22:48,352 INFO [finetune.py:976] (0/7) Epoch 11, batch 2100, loss[loss=0.2334, simple_loss=0.304, pruned_loss=0.08144, over 4927.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2534, pruned_loss=0.06082, over 956328.11 frames. ], batch size: 38, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:22:53,368 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=59385.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:23:12,107 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1476, 1.4408, 1.2971, 1.6725, 1.5316, 1.7387, 1.3034, 3.0570],
+ device='cuda:0'), covar=tensor([0.0657, 0.0808, 0.0840, 0.1236, 0.0662, 0.0524, 0.0782, 0.0177],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0058],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 01:23:25,567 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5216, 0.9810, 1.5855, 1.8840, 1.5635, 1.5102, 1.5359, 1.6099],
+ device='cuda:0'), covar=tensor([0.7046, 0.8509, 0.9643, 1.0984, 0.8721, 1.1459, 1.1112, 1.0503],
+ device='cuda:0'), in_proj_covar=tensor([0.0408, 0.0409, 0.0498, 0.0515, 0.0437, 0.0457, 0.0465, 0.0467],
+ device='cuda:0'), out_proj_covar=tensor([9.9089e-05, 1.0157e-04, 1.1228e-04, 1.2235e-04, 1.0591e-04, 1.1035e-04,
+ 1.1147e-04, 1.1200e-04], device='cuda:0')
+2023-04-27 01:23:32,962 INFO [finetune.py:976] (0/7) Epoch 11, batch 2150, loss[loss=0.194, simple_loss=0.2781, pruned_loss=0.05493, over 4825.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2568, pruned_loss=0.06224, over 955123.27 frames. ], batch size: 33, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:23:35,512 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59431.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:23:44,711 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.256e+02 1.698e+02 2.142e+02 2.512e+02 4.265e+02, threshold=4.284e+02, percent-clipped=3.0
+2023-04-27 01:23:54,707 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.31 vs. limit=5.0
+2023-04-27 01:24:15,854 INFO [finetune.py:976] (0/7) Epoch 11, batch 2200, loss[loss=0.2195, simple_loss=0.2843, pruned_loss=0.07741, over 4825.00 frames. ], tot_loss[loss=0.193, simple_loss=0.26, pruned_loss=0.06302, over 953390.21 frames. ], batch size: 45, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:24:18,113 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=59479.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:24:35,839 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1098, 1.4170, 1.5790, 1.7461, 1.5938, 1.7505, 1.6476, 1.5979],
+ device='cuda:0'), covar=tensor([0.4746, 0.6277, 0.5220, 0.4953, 0.6253, 0.8455, 0.5879, 0.5394],
+ device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0379, 0.0315, 0.0326, 0.0339, 0.0400, 0.0358, 0.0323],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 01:24:37,687 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59502.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:24:54,697 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59520.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:24:59,357 INFO [finetune.py:976] (0/7) Epoch 11, batch 2250, loss[loss=0.2037, simple_loss=0.2675, pruned_loss=0.07, over 4754.00 frames. ], tot_loss[loss=0.1933, simple_loss=0.2602, pruned_loss=0.06314, over 952802.60 frames. ], batch size: 26, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:25:13,213 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.641e+02 2.091e+02 2.357e+02 4.911e+02, threshold=4.183e+02, percent-clipped=2.0
+2023-04-27 01:25:45,852 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59562.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:25:46,509 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59563.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:25:55,720 INFO [finetune.py:976] (0/7) Epoch 11, batch 2300, loss[loss=0.1921, simple_loss=0.2496, pruned_loss=0.06735, over 4918.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2601, pruned_loss=0.06264, over 951528.21 frames. ], batch size: 42, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:26:17,896 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=59610.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:26:29,150 INFO [finetune.py:976] (0/7) Epoch 11, batch 2350, loss[loss=0.1723, simple_loss=0.2419, pruned_loss=0.05133, over 4837.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.257, pruned_loss=0.06121, over 950383.82 frames. ], batch size: 44, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:26:31,622 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.26 vs. limit=5.0
+2023-04-27 01:26:37,792 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.642e+02 1.943e+02 2.312e+02 3.854e+02, threshold=3.885e+02, percent-clipped=0.0
+2023-04-27 01:27:00,870 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1842, 2.2424, 1.9258, 1.8212, 2.3649, 1.9621, 2.8439, 1.6954],
+ device='cuda:0'), covar=tensor([0.3860, 0.1895, 0.4606, 0.3273, 0.1715, 0.2535, 0.1601, 0.4582],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0348, 0.0427, 0.0359, 0.0384, 0.0383, 0.0381, 0.0419],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 01:27:02,575 INFO [finetune.py:976] (0/7) Epoch 11, batch 2400, loss[loss=0.2093, simple_loss=0.2671, pruned_loss=0.07577, over 4913.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2535, pruned_loss=0.06001, over 951510.64 frames. ], batch size: 43, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:27:36,063 INFO [finetune.py:976] (0/7) Epoch 11, batch 2450, loss[loss=0.1514, simple_loss=0.2129, pruned_loss=0.04497, over 4757.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2512, pruned_loss=0.05912, over 952087.91 frames. ], batch size: 27, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:27:43,830 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.580e+02 1.900e+02 2.443e+02 4.290e+02, threshold=3.800e+02, percent-clipped=1.0
+2023-04-27 01:27:47,859 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1956, 1.2577, 1.3786, 1.5421, 1.5319, 1.2746, 0.8773, 1.3992],
+ device='cuda:0'), covar=tensor([0.0986, 0.1414, 0.0936, 0.0754, 0.0733, 0.0955, 0.1064, 0.0676],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0204, 0.0184, 0.0176, 0.0179, 0.0189, 0.0161, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 01:28:04,887 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0
+2023-04-27 01:28:09,969 INFO [finetune.py:976] (0/7) Epoch 11, batch 2500, loss[loss=0.1479, simple_loss=0.2217, pruned_loss=0.03702, over 4760.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2525, pruned_loss=0.05958, over 953838.88 frames. ], batch size: 27, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:28:32,780 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7478, 3.7880, 2.7164, 4.3647, 3.7615, 3.7415, 1.8431, 3.7337],
+ device='cuda:0'), covar=tensor([0.1444, 0.1089, 0.3072, 0.1396, 0.2785, 0.1691, 0.4995, 0.2150],
+ device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0216, 0.0248, 0.0304, 0.0296, 0.0249, 0.0268, 0.0270],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 01:28:41,886 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0
+2023-04-27 01:28:44,916 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=59820.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:28:54,834 INFO [finetune.py:976] (0/7) Epoch 11, batch 2550, loss[loss=0.2338, simple_loss=0.2956, pruned_loss=0.08596, over 4878.00 frames. ], tot_loss[loss=0.189, simple_loss=0.2566, pruned_loss=0.0607, over 954998.14 frames. ], batch size: 34, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:29:12,510 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.688e+02 2.032e+02 2.494e+02 8.629e+02, threshold=4.065e+02, percent-clipped=5.0
+2023-04-27 01:29:23,398 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59848.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:29:35,316 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=59858.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:29:46,953 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=59868.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:29:57,640 INFO [finetune.py:976] (0/7) Epoch 11, batch 2600, loss[loss=0.1304, simple_loss=0.1986, pruned_loss=0.03115, over 4788.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2581, pruned_loss=0.06109, over 955916.53 frames. ], batch size: 26, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:30:10,826 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=59889.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:30:22,124 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7348, 1.3273, 1.8294, 2.2323, 1.8703, 1.6949, 1.7510, 1.7792],
+ device='cuda:0'), covar=tensor([0.5266, 0.7703, 0.7370, 0.6975, 0.6601, 0.8804, 0.9377, 0.9337],
+ device='cuda:0'), in_proj_covar=tensor([0.0408, 0.0410, 0.0497, 0.0517, 0.0439, 0.0457, 0.0466, 0.0467],
+ device='cuda:0'), out_proj_covar=tensor([9.9118e-05, 1.0185e-04, 1.1218e-04, 1.2272e-04, 1.0617e-04, 1.1056e-04,
+ 1.1183e-04, 1.1213e-04], device='cuda:0')
+2023-04-27 01:30:29,466 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59909.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:30:32,466 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.86 vs. limit=5.0
+2023-04-27 01:30:43,254 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0
+2023-04-27 01:30:51,533 INFO [finetune.py:976] (0/7) Epoch 11, batch 2650, loss[loss=0.1981, simple_loss=0.271, pruned_loss=0.06257, over 4801.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2584, pruned_loss=0.06036, over 954035.47 frames. ], batch size: 40, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:31:04,460 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.693e+02 1.978e+02 2.480e+02 4.060e+02, threshold=3.956e+02, percent-clipped=0.0
+2023-04-27 01:31:15,135 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=59950.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:31:43,484 INFO [finetune.py:976] (0/7) Epoch 11, batch 2700, loss[loss=0.1768, simple_loss=0.2422, pruned_loss=0.05571, over 4906.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2583, pruned_loss=0.06065, over 953368.00 frames. ], batch size: 36, lr: 3.70e-03, grad_scale: 32.0
+2023-04-27 01:31:58,613 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-60000.pt
+2023-04-27 01:32:02,900 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-27 01:32:05,801 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1099, 1.5461, 1.9071, 2.0269, 1.8442, 1.4963, 0.9608, 1.5855],
+ device='cuda:0'), covar=tensor([0.3534, 0.3437, 0.1803, 0.2655, 0.3070, 0.2881, 0.4896, 0.2407],
+ device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0248, 0.0220, 0.0314, 0.0214, 0.0227, 0.0231, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 01:32:17,131 INFO [finetune.py:976] (0/7) Epoch 11, batch 2750, loss[loss=0.1497, simple_loss=0.2305, pruned_loss=0.03442, over 4891.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2558, pruned_loss=0.05979, over 954299.27 frames. ], batch size: 35, lr: 3.69e-03, grad_scale: 64.0
+2023-04-27 01:32:24,358 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.196e+02 1.624e+02 1.944e+02 2.490e+02 4.678e+02, threshold=3.889e+02, percent-clipped=1.0
+2023-04-27 01:32:25,699 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60040.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:32:33,980 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60052.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:32:42,795 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60065.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:32:50,410 INFO [finetune.py:976] (0/7) Epoch 11, batch 2800, loss[loss=0.1361, simple_loss=0.2055, pruned_loss=0.03329, over 4779.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2527, pruned_loss=0.05902, over 952838.23 frames. ], batch size: 26, lr: 3.69e-03, grad_scale: 64.0
+2023-04-27 01:32:51,104 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60078.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:33:05,621 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60101.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 01:33:14,386 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60113.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 01:33:18,742 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.09 vs. limit=5.0
+2023-04-27 01:33:22,780 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60126.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:33:23,287 INFO [finetune.py:976] (0/7) Epoch 11, batch 2850, loss[loss=0.205, simple_loss=0.2637, pruned_loss=0.07322, over 4692.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2513, pruned_loss=0.0591, over 950090.88 frames. ], batch size: 23, lr: 3.69e-03, grad_scale: 64.0
+2023-04-27 01:33:29,987 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.009e+02 1.671e+02 1.998e+02 2.360e+02 5.267e+02, threshold=3.997e+02, percent-clipped=5.0
+2023-04-27 01:33:30,711 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60139.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:33:40,867 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-27 01:33:43,073 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60158.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:33:55,931 INFO [finetune.py:976] (0/7) Epoch 11, batch 2900, loss[loss=0.2355, simple_loss=0.2875, pruned_loss=0.09173, over 4774.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2552, pruned_loss=0.06075, over 951663.41 frames. ], batch size: 54, lr: 3.69e-03, grad_scale: 64.0
+2023-04-27 01:34:15,766 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60198.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:34:25,132 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60204.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:34:26,338 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=60206.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:34:59,279 INFO [finetune.py:976] (0/7) Epoch 11, batch 2950, loss[loss=0.1929, simple_loss=0.2638, pruned_loss=0.06106, over 4852.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2577, pruned_loss=0.06145, over 951247.46 frames. ], batch size: 44, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:35:01,800 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7863, 1.2768, 1.5393, 1.6862, 1.4656, 1.2504, 0.6865, 1.2423],
+ device='cuda:0'), covar=tensor([0.3571, 0.3874, 0.1996, 0.2560, 0.2897, 0.3008, 0.4717, 0.2374],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0249, 0.0221, 0.0316, 0.0215, 0.0228, 0.0231, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 01:35:12,692 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.759e+01 1.634e+02 1.896e+02 2.531e+02 4.222e+02, threshold=3.792e+02, percent-clipped=1.0
+2023-04-27 01:35:21,688 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60245.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:35:36,399 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60259.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:36:03,087 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5989, 1.6839, 0.6506, 1.2750, 1.7785, 1.4865, 1.3847, 1.3847],
+ device='cuda:0'), covar=tensor([0.0550, 0.0397, 0.0383, 0.0586, 0.0267, 0.0562, 0.0563, 0.0609],
+ device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0025, 0.0023, 0.0030, 0.0020, 0.0029, 0.0029, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0050, 0.0037, 0.0049, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-27 01:36:05,411 INFO [finetune.py:976] (0/7) Epoch 11, batch 3000, loss[loss=0.1746, simple_loss=0.244, pruned_loss=0.05259, over 4829.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2585, pruned_loss=0.06134, over 951885.48 frames. ], batch size: 47, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:36:05,412 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 01:36:27,793 INFO [finetune.py:1010] (0/7) Epoch 11, validation: loss=0.1531, simple_loss=0.2255, pruned_loss=0.04032, over 2265189.00 frames.
+2023-04-27 01:36:27,793 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6338MB
+2023-04-27 01:36:59,794 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0987, 1.4459, 1.2487, 1.6488, 1.5226, 1.7207, 1.2611, 3.0022],
+ device='cuda:0'), covar=tensor([0.0651, 0.0776, 0.0807, 0.1188, 0.0644, 0.0475, 0.0765, 0.0172],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0058],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 01:37:31,377 INFO [finetune.py:976] (0/7) Epoch 11, batch 3050, loss[loss=0.1546, simple_loss=0.2323, pruned_loss=0.03844, over 4786.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2595, pruned_loss=0.06126, over 953687.41 frames. ], batch size: 29, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:37:45,703 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.734e+02 2.169e+02 2.489e+02 4.339e+02, threshold=4.338e+02, percent-clipped=2.0
+2023-04-27 01:38:21,993 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-04-27 01:38:26,535 INFO [finetune.py:976] (0/7) Epoch 11, batch 3100, loss[loss=0.1527, simple_loss=0.2262, pruned_loss=0.03961, over 4770.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2565, pruned_loss=0.06025, over 954589.59 frames. ], batch size: 27, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:38:32,464 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2672, 3.0578, 0.8614, 1.5560, 1.7190, 2.1616, 1.7819, 0.9092],
+ device='cuda:0'), covar=tensor([0.1326, 0.0872, 0.1844, 0.1216, 0.0980, 0.0915, 0.1473, 0.1789],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0248, 0.0141, 0.0122, 0.0134, 0.0153, 0.0118, 0.0122],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 01:38:40,711 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60396.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 01:38:48,248 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60408.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 01:38:56,649 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60421.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:39:00,266 INFO [finetune.py:976] (0/7) Epoch 11, batch 3150, loss[loss=0.2026, simple_loss=0.2554, pruned_loss=0.07488, over 4805.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2545, pruned_loss=0.0603, over 954050.59 frames. ], batch size: 51, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:39:05,956 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60434.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:39:09,788 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.219e+02 1.640e+02 1.939e+02 2.354e+02 4.857e+02, threshold=3.879e+02, percent-clipped=2.0
+2023-04-27 01:39:17,082 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8782, 1.4356, 1.6788, 1.7768, 1.6294, 1.3570, 0.8049, 1.3756],
+ device='cuda:0'), covar=tensor([0.3424, 0.3598, 0.1750, 0.2174, 0.2551, 0.2839, 0.4361, 0.2356],
+ device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0246, 0.0219, 0.0313, 0.0212, 0.0226, 0.0229, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 01:39:33,960 INFO [finetune.py:976] (0/7) Epoch 11, batch 3200, loss[loss=0.1546, simple_loss=0.2248, pruned_loss=0.04226, over 4901.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2515, pruned_loss=0.05927, over 955853.95 frames. ], batch size: 35, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:39:53,420 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60504.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:39:59,563 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0419, 3.7142, 1.2522, 2.0398, 2.3189, 2.7297, 2.2118, 1.3931],
+ device='cuda:0'), covar=tensor([0.1221, 0.0927, 0.2077, 0.1194, 0.0962, 0.0920, 0.1597, 0.1735],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0250, 0.0142, 0.0122, 0.0134, 0.0154, 0.0119, 0.0123],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 01:40:03,282 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1792, 1.2161, 1.3409, 1.5146, 1.5493, 1.2543, 0.8343, 1.4222],
+ device='cuda:0'), covar=tensor([0.0932, 0.1501, 0.0905, 0.0652, 0.0747, 0.0874, 0.1032, 0.0648],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0203, 0.0183, 0.0176, 0.0180, 0.0189, 0.0160, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 01:40:07,940 INFO [finetune.py:976] (0/7) Epoch 11, batch 3250, loss[loss=0.1897, simple_loss=0.265, pruned_loss=0.05719, over 4819.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2511, pruned_loss=0.05915, over 955413.87 frames. ], batch size: 40, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:40:15,722 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.675e+02 1.982e+02 2.288e+02 4.621e+02, threshold=3.964e+02, percent-clipped=1.0
+2023-04-27 01:40:20,474 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60545.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:40:25,156 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=60552.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:40:26,842 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=60554.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:40:52,022 INFO [finetune.py:976] (0/7) Epoch 11, batch 3300, loss[loss=0.1667, simple_loss=0.2408, pruned_loss=0.04631, over 4760.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2566, pruned_loss=0.06142, over 952357.88 frames. ], batch size: 59, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:40:53,952 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3336, 1.2348, 1.5917, 1.4941, 1.2418, 1.1229, 1.2402, 0.7898],
+ device='cuda:0'), covar=tensor([0.0563, 0.0762, 0.0468, 0.0609, 0.0790, 0.1240, 0.0621, 0.0779],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0072, 0.0070, 0.0067, 0.0075, 0.0096, 0.0076, 0.0071],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 01:41:14,031 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=60593.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:41:58,102 INFO [finetune.py:976] (0/7) Epoch 11, batch 3350, loss[loss=0.1712, simple_loss=0.2418, pruned_loss=0.05032, over 4813.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2589, pruned_loss=0.06223, over 952923.53 frames. ], batch size: 40, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:42:08,515 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-04-27 01:42:11,849 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.646e+02 2.104e+02 2.646e+02 5.419e+02, threshold=4.209e+02, percent-clipped=4.0
+2023-04-27 01:42:52,679 INFO [finetune.py:976] (0/7) Epoch 11, batch 3400, loss[loss=0.1866, simple_loss=0.262, pruned_loss=0.05559, over 4888.00 frames. ], tot_loss[loss=0.1931, simple_loss=0.2604, pruned_loss=0.06291, over 952329.62 frames. ], batch size: 32, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:43:05,425 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60696.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:43:14,147 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60708.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:43:20,714 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=60718.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:43:22,442 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60721.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:43:26,000 INFO [finetune.py:976] (0/7) Epoch 11, batch 3450, loss[loss=0.1708, simple_loss=0.2369, pruned_loss=0.05233, over 4816.00 frames. ], tot_loss[loss=0.1922, simple_loss=0.2594, pruned_loss=0.06248, over 953419.23 frames. ], batch size: 40, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:43:26,772 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7520, 1.3083, 1.8726, 2.0836, 1.7406, 1.6805, 1.7721, 1.7867],
+ device='cuda:0'), covar=tensor([0.6966, 0.9065, 0.9556, 1.1223, 0.8637, 1.1302, 1.1818, 1.0827],
+ device='cuda:0'), in_proj_covar=tensor([0.0408, 0.0410, 0.0497, 0.0515, 0.0438, 0.0458, 0.0467, 0.0466],
+ device='cuda:0'), out_proj_covar=tensor([9.9156e-05, 1.0168e-04, 1.1203e-04, 1.2226e-04, 1.0616e-04, 1.1067e-04,
+ 1.1183e-04, 1.1190e-04], device='cuda:0')
+2023-04-27 01:43:30,779 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60734.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:43:33,697 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.685e+02 2.029e+02 2.444e+02 3.873e+02, threshold=4.057e+02, percent-clipped=0.0
+2023-04-27 01:43:34,472 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.66 vs. limit=5.0
+2023-04-27 01:43:36,748 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=60744.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:43:45,475 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=60756.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:43:54,319 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=60769.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:43:59,112 INFO [finetune.py:976] (0/7) Epoch 11, batch 3500, loss[loss=0.1709, simple_loss=0.2396, pruned_loss=0.05114, over 4890.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2565, pruned_loss=0.06157, over 953552.57 frames. ], batch size: 32, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:44:00,466 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=60779.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:44:02,192 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=60782.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:44:22,363 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.61 vs. limit=5.0
+2023-04-27 01:44:29,366 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-04-27 01:44:32,809 INFO [finetune.py:976] (0/7) Epoch 11, batch 3550, loss[loss=0.2002, simple_loss=0.2702, pruned_loss=0.06508, over 4686.00 frames. ], tot_loss[loss=0.1883, simple_loss=0.2546, pruned_loss=0.06099, over 953891.59 frames. ], batch size: 23, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:44:40,092 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.124e+02 1.561e+02 1.887e+02 2.297e+02 4.767e+02, threshold=3.774e+02, percent-clipped=1.0
+2023-04-27 01:44:44,360 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1378, 2.6842, 0.8775, 1.3080, 1.7930, 1.1875, 3.4636, 1.6052],
+ device='cuda:0'), covar=tensor([0.0767, 0.0767, 0.0951, 0.1604, 0.0687, 0.1249, 0.0334, 0.0880],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0051, 0.0052, 0.0077, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 01:44:50,270 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=60854.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:45:06,039 INFO [finetune.py:976] (0/7) Epoch 11, batch 3600, loss[loss=0.1946, simple_loss=0.2676, pruned_loss=0.06078, over 4811.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2507, pruned_loss=0.05927, over 951558.44 frames. ], batch size: 39, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:45:10,572 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-04-27 01:45:21,745 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=60902.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:45:39,843 INFO [finetune.py:976] (0/7) Epoch 11, batch 3650, loss[loss=0.2382, simple_loss=0.2917, pruned_loss=0.09234, over 4744.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2517, pruned_loss=0.05957, over 949947.89 frames. ], batch size: 54, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:45:47,175 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.725e+02 2.086e+02 2.393e+02 4.773e+02, threshold=4.172e+02, percent-clipped=3.0
+2023-04-27 01:46:04,733 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4082, 2.8786, 1.0021, 1.5094, 2.1872, 1.5289, 4.2683, 2.0404],
+ device='cuda:0'), covar=tensor([0.0705, 0.0901, 0.0945, 0.1374, 0.0577, 0.1054, 0.0250, 0.0654],
+ device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0066, 0.0049, 0.0047, 0.0051, 0.0052, 0.0077, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 01:46:25,831 INFO [finetune.py:976] (0/7) Epoch 11, batch 3700, loss[loss=0.2184, simple_loss=0.2891, pruned_loss=0.07382, over 4733.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2553, pruned_loss=0.06011, over 951060.67 frames. ], batch size: 59, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:47:05,595 INFO [finetune.py:976] (0/7) Epoch 11, batch 3750, loss[loss=0.2315, simple_loss=0.3011, pruned_loss=0.08094, over 4829.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2572, pruned_loss=0.06058, over 951921.13 frames. ], batch size: 47, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:47:18,733 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.172e+02 1.566e+02 1.879e+02 2.369e+02 3.505e+02, threshold=3.758e+02, percent-clipped=0.0
+2023-04-27 01:47:30,639 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0
+2023-04-27 01:47:40,533 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61061.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:48:01,155 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61074.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:48:05,286 INFO [finetune.py:976] (0/7) Epoch 11, batch 3800, loss[loss=0.1533, simple_loss=0.2215, pruned_loss=0.04255, over 4741.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2582, pruned_loss=0.06074, over 952408.54 frames. ], batch size: 59, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:48:06,520 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9578, 1.6360, 1.9242, 2.2681, 2.3590, 1.7305, 1.4126, 2.1006],
+ device='cuda:0'), covar=tensor([0.0819, 0.1223, 0.0756, 0.0568, 0.0529, 0.0932, 0.0985, 0.0566],
+ device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0205, 0.0184, 0.0176, 0.0180, 0.0190, 0.0160, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 01:48:30,375 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3466, 1.6084, 1.3407, 1.5757, 1.3859, 1.2262, 1.3243, 1.0497],
+ device='cuda:0'), covar=tensor([0.1499, 0.1179, 0.1020, 0.1051, 0.3338, 0.1278, 0.1631, 0.2274],
+ device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0315, 0.0227, 0.0285, 0.0314, 0.0272, 0.0256, 0.0276],
+ device='cuda:0'), out_proj_covar=tensor([1.1879e-04, 1.2659e-04, 9.1027e-05, 1.1418e-04, 1.2831e-04, 1.0915e-04,
+ 1.0421e-04, 1.1060e-04], device='cuda:0')
+2023-04-27 01:48:56,478 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61122.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:48:59,869 INFO [finetune.py:976] (0/7) Epoch 11, batch 3850, loss[loss=0.1837, simple_loss=0.2395, pruned_loss=0.06393, over 4901.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2568, pruned_loss=0.06012, over 952489.14 frames. ], batch size: 35, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:49:08,085 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.671e+02 1.909e+02 2.247e+02 3.528e+02, threshold=3.817e+02, percent-clipped=0.0
+2023-04-27 01:49:33,125 INFO [finetune.py:976] (0/7) Epoch 11, batch 3900, loss[loss=0.1901, simple_loss=0.2495, pruned_loss=0.06532, over 4927.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2547, pruned_loss=0.06017, over 953349.90 frames. ], batch size: 38, lr: 3.69e-03, grad_scale: 32.0
+2023-04-27 01:49:52,980 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9632, 2.6046, 2.0344, 1.8305, 1.4271, 1.4650, 2.1167, 1.4432],
+ device='cuda:0'), covar=tensor([0.1759, 0.1334, 0.1421, 0.1840, 0.2363, 0.1978, 0.0963, 0.2036],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0214, 0.0169, 0.0205, 0.0203, 0.0185, 0.0159, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 01:50:05,883 INFO [finetune.py:976] (0/7) Epoch 11, batch 3950, loss[loss=0.1459, simple_loss=0.2159, pruned_loss=0.03799, over 4806.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2515, pruned_loss=0.05902, over 955136.69 frames. ], batch size: 25, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:50:15,547 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.581e+02 1.829e+02 2.221e+02 4.088e+02, threshold=3.658e+02, percent-clipped=2.0
+2023-04-27 01:50:39,619 INFO [finetune.py:976] (0/7) Epoch 11, batch 4000, loss[loss=0.2022, simple_loss=0.2756, pruned_loss=0.06433, over 4823.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2524, pruned_loss=0.05986, over 956899.57 frames. ], batch size: 40, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:51:18,202 INFO [finetune.py:976] (0/7) Epoch 11, batch 4050, loss[loss=0.1983, simple_loss=0.2799, pruned_loss=0.05834, over 4891.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2564, pruned_loss=0.0614, over 955981.87 frames. ], batch size: 43, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:51:37,047 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.723e+02 1.975e+02 2.601e+02 5.201e+02, threshold=3.950e+02, percent-clipped=3.0
+2023-04-27 01:51:57,075 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4199, 1.0159, 0.4135, 1.1966, 0.9927, 1.3105, 1.2085, 1.2592],
+ device='cuda:0'), covar=tensor([0.0562, 0.0447, 0.0448, 0.0609, 0.0340, 0.0578, 0.0574, 0.0623],
+ device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0025, 0.0023, 0.0030, 0.0020, 0.0029, 0.0029, 0.0030],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0050, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 01:52:07,576 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-27 01:52:21,137 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8587, 1.5821, 2.1241, 2.3609, 1.9708, 1.8547, 1.9444, 1.8876],
+ device='cuda:0'), covar=tensor([0.5743, 0.7965, 0.8274, 0.7047, 0.6904, 0.9685, 1.0201, 1.0035],
+ device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0412, 0.0500, 0.0518, 0.0442, 0.0461, 0.0472, 0.0471],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 01:52:21,694 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61374.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:52:22,343 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7155, 2.4620, 1.6949, 1.5505, 1.2492, 1.2791, 1.7391, 1.1796],
+ device='cuda:0'), covar=tensor([0.1762, 0.1283, 0.1588, 0.1803, 0.2560, 0.2125, 0.1050, 0.2204],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0214, 0.0169, 0.0204, 0.0203, 0.0184, 0.0158, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 01:52:23,440 INFO [finetune.py:976] (0/7) Epoch 11, batch 4100, loss[loss=0.1923, simple_loss=0.2655, pruned_loss=0.05951, over 4768.00 frames. ], tot_loss[loss=0.1915, simple_loss=0.259, pruned_loss=0.06201, over 955221.02 frames. ], batch size: 26, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:53:06,877 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8145, 1.0967, 1.5377, 1.6854, 1.6179, 1.7486, 1.6039, 1.5590],
+ device='cuda:0'), covar=tensor([0.4084, 0.5755, 0.4804, 0.4628, 0.5869, 0.8032, 0.5245, 0.4976],
+ device='cuda:0'), in_proj_covar=tensor([0.0324, 0.0377, 0.0313, 0.0325, 0.0338, 0.0398, 0.0357, 0.0321],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 01:53:16,498 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=61417.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:53:25,484 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=61422.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:53:28,494 INFO [finetune.py:976] (0/7) Epoch 11, batch 4150, loss[loss=0.1534, simple_loss=0.2228, pruned_loss=0.042, over 4785.00 frames. ], tot_loss[loss=0.1917, simple_loss=0.2595, pruned_loss=0.06199, over 954472.80 frames. ], batch size: 25, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:53:48,240 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.660e+02 1.903e+02 2.358e+02 4.640e+02, threshold=3.807e+02, percent-clipped=2.0
+2023-04-27 01:54:37,130 INFO [finetune.py:976] (0/7) Epoch 11, batch 4200, loss[loss=0.1629, simple_loss=0.222, pruned_loss=0.05196, over 4721.00 frames. ], tot_loss[loss=0.1927, simple_loss=0.2601, pruned_loss=0.06261, over 954628.38 frames. ], batch size: 23, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:55:27,114 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0
+2023-04-27 01:55:45,102 INFO [finetune.py:976] (0/7) Epoch 11, batch 4250, loss[loss=0.1855, simple_loss=0.247, pruned_loss=0.06204, over 4908.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2568, pruned_loss=0.06165, over 956922.99 frames. ], batch size: 43, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:55:57,237 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.633e+02 1.870e+02 2.370e+02 4.371e+02, threshold=3.739e+02, percent-clipped=2.0
+2023-04-27 01:56:41,160 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0
+2023-04-27 01:56:49,827 INFO [finetune.py:976] (0/7) Epoch 11, batch 4300, loss[loss=0.1602, simple_loss=0.2309, pruned_loss=0.04475, over 4832.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2546, pruned_loss=0.06112, over 956292.65 frames. ], batch size: 33, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:57:11,836 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5595, 3.4349, 0.8158, 1.8435, 1.9559, 2.3025, 2.0152, 0.9311],
+ device='cuda:0'), covar=tensor([0.1319, 0.1064, 0.2223, 0.1279, 0.1075, 0.1054, 0.1355, 0.2130],
+ device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0250, 0.0142, 0.0123, 0.0135, 0.0154, 0.0119, 0.0122],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 01:57:23,568 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7405, 1.6205, 1.9525, 2.1153, 1.6021, 1.3579, 1.6253, 1.0642],
+ device='cuda:0'), covar=tensor([0.0644, 0.0784, 0.0500, 0.0627, 0.0781, 0.1388, 0.0783, 0.0890],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0072, 0.0071, 0.0068, 0.0076, 0.0097, 0.0077, 0.0073],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 01:57:57,464 INFO [finetune.py:976] (0/7) Epoch 11, batch 4350, loss[loss=0.1608, simple_loss=0.2249, pruned_loss=0.04829, over 4759.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2508, pruned_loss=0.05966, over 957963.91 frames. ], batch size: 54, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:58:10,464 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.036e+02 1.619e+02 1.912e+02 2.174e+02 4.082e+02, threshold=3.823e+02, percent-clipped=2.0
+2023-04-27 01:59:02,120 INFO [finetune.py:976] (0/7) Epoch 11, batch 4400, loss[loss=0.2381, simple_loss=0.3036, pruned_loss=0.08632, over 4912.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2535, pruned_loss=0.06088, over 958187.71 frames. ], batch size: 36, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 01:59:57,203 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=61717.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 01:59:58,571 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0
+2023-04-27 02:00:08,835 INFO [finetune.py:976] (0/7) Epoch 11, batch 4450, loss[loss=0.2448, simple_loss=0.3077, pruned_loss=0.09095, over 4815.00 frames. ], tot_loss[loss=0.1907, simple_loss=0.2571, pruned_loss=0.06214, over 955348.53 frames. ], batch size: 51, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:00:09,569 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8996, 1.6526, 1.9579, 2.1931, 2.2713, 1.7731, 1.5957, 2.0515],
+ device='cuda:0'), covar=tensor([0.1003, 0.1211, 0.0852, 0.0758, 0.0687, 0.1031, 0.0954, 0.0620],
+ device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0202, 0.0181, 0.0173, 0.0178, 0.0187, 0.0158, 0.0182],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 02:00:17,351 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61732.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:00:21,986 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.107e+02 1.778e+02 2.036e+02 2.485e+02 5.642e+02, threshold=4.071e+02, percent-clipped=3.0
+2023-04-27 02:00:24,581 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61743.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:00:45,388 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=61765.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:01:03,255 INFO [finetune.py:976] (0/7) Epoch 11, batch 4500, loss[loss=0.1746, simple_loss=0.2475, pruned_loss=0.0508, over 4755.00 frames. ], tot_loss[loss=0.1913, simple_loss=0.2581, pruned_loss=0.06222, over 956254.33 frames. ], batch size: 26, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:01:19,117 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61793.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:01:26,332 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61804.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:01:42,137 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3553, 3.0863, 2.4493, 2.6556, 2.1290, 2.4283, 2.5553, 1.7668],
+ device='cuda:0'), covar=tensor([0.2063, 0.1310, 0.0794, 0.1307, 0.2961, 0.1415, 0.2041, 0.2778],
+ device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0314, 0.0225, 0.0284, 0.0312, 0.0270, 0.0252, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([1.1797e-04, 1.2626e-04, 9.0227e-05, 1.1343e-04, 1.2724e-04, 1.0830e-04,
+ 1.0265e-04, 1.1002e-04], device='cuda:0')
+2023-04-27 02:01:42,639 INFO [finetune.py:976] (0/7) Epoch 11, batch 4550, loss[loss=0.1734, simple_loss=0.2531, pruned_loss=0.04688, over 4889.00 frames. ], tot_loss[loss=0.1918, simple_loss=0.2587, pruned_loss=0.06245, over 955132.88 frames. ], batch size: 35, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:01:49,951 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.694e+02 2.002e+02 2.452e+02 5.471e+02, threshold=4.003e+02, percent-clipped=1.0
+2023-04-27 02:01:51,914 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4793, 1.4599, 0.5697, 1.2296, 1.5110, 1.3947, 1.2759, 1.3197],
+ device='cuda:0'), covar=tensor([0.0557, 0.0405, 0.0437, 0.0586, 0.0308, 0.0575, 0.0547, 0.0623],
+ device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0025, 0.0023, 0.0030, 0.0020, 0.0029, 0.0029, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0050, 0.0037, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 02:02:16,335 INFO [finetune.py:976] (0/7) Epoch 11, batch 4600, loss[loss=0.1795, simple_loss=0.2447, pruned_loss=0.05713, over 4888.00 frames. ], tot_loss[loss=0.191, simple_loss=0.2582, pruned_loss=0.06186, over 955246.32 frames. ], batch size: 32, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:02:40,192 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=61914.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:02:42,644 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0
+2023-04-27 02:02:49,393 INFO [finetune.py:976] (0/7) Epoch 11, batch 4650, loss[loss=0.179, simple_loss=0.2371, pruned_loss=0.06046, over 4901.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2567, pruned_loss=0.06197, over 955790.74 frames. ], batch size: 32, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:02:56,646 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.561e+02 1.984e+02 2.276e+02 4.966e+02, threshold=3.968e+02, percent-clipped=2.0
+2023-04-27 02:03:20,493 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=61975.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:03:22,016 INFO [finetune.py:976] (0/7) Epoch 11, batch 4700, loss[loss=0.2015, simple_loss=0.2684, pruned_loss=0.06726, over 4899.00 frames. ], tot_loss[loss=0.187, simple_loss=0.253, pruned_loss=0.06045, over 956271.85 frames. ], batch size: 36, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:03:27,948 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0160, 1.9813, 2.3014, 2.4923, 2.4733, 1.9310, 1.7952, 2.2402],
+ device='cuda:0'), covar=tensor([0.1021, 0.1004, 0.0593, 0.0560, 0.0611, 0.1036, 0.0857, 0.0524],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0204, 0.0183, 0.0174, 0.0180, 0.0189, 0.0159, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 02:03:36,525 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-62000.pt
+2023-04-27 02:04:06,619 INFO [finetune.py:976] (0/7) Epoch 11, batch 4750, loss[loss=0.1967, simple_loss=0.2603, pruned_loss=0.06654, over 4823.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2514, pruned_loss=0.05989, over 958144.29 frames. ], batch size: 45, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:04:17,535 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6308, 1.1773, 1.7282, 2.1372, 1.7425, 1.5885, 1.6350, 1.6292],
+ device='cuda:0'), covar=tensor([0.5533, 0.7638, 0.7628, 0.7107, 0.6709, 0.9143, 0.8826, 0.9634],
+ device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0410, 0.0498, 0.0514, 0.0441, 0.0459, 0.0469, 0.0469],
+ device='cuda:0'), out_proj_covar=tensor([9.9666e-05, 1.0163e-04, 1.1248e-04, 1.2210e-04, 1.0681e-04, 1.1109e-04,
+ 1.1233e-04, 1.1249e-04], device='cuda:0')
+2023-04-27 02:04:20,378 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.587e+02 1.879e+02 2.301e+02 4.862e+02, threshold=3.757e+02, percent-clipped=3.0
+2023-04-27 02:04:27,919 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9486, 1.8947, 2.1967, 2.3669, 1.7612, 1.5923, 1.9584, 1.0106],
+ device='cuda:0'), covar=tensor([0.0682, 0.1008, 0.0553, 0.0962, 0.0924, 0.1323, 0.0873, 0.1054],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0072, 0.0070, 0.0067, 0.0075, 0.0096, 0.0076, 0.0072],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 02:04:56,946 INFO [finetune.py:976] (0/7) Epoch 11, batch 4800, loss[loss=0.1775, simple_loss=0.254, pruned_loss=0.05047, over 4744.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2546, pruned_loss=0.06043, over 958273.78 frames. ], batch size: 59, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:05:05,178 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62088.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:05:11,907 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62099.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:05:24,555 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0
+2023-04-27 02:05:27,346 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62122.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:05:29,195 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3470, 1.2003, 1.6264, 1.5181, 1.2118, 1.0877, 1.2730, 0.7058],
+ device='cuda:0'), covar=tensor([0.0662, 0.0798, 0.0446, 0.0635, 0.0912, 0.1384, 0.0556, 0.0918],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0072, 0.0070, 0.0067, 0.0075, 0.0097, 0.0076, 0.0071],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 02:05:30,802 INFO [finetune.py:976] (0/7) Epoch 11, batch 4850, loss[loss=0.2252, simple_loss=0.2871, pruned_loss=0.08171, over 4840.00 frames. ], tot_loss[loss=0.1906, simple_loss=0.2583, pruned_loss=0.06151, over 957009.70 frames. ], batch size: 47, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:05:39,085 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.265e+02 1.826e+02 2.171e+02 2.650e+02 4.437e+02, threshold=4.341e+02, percent-clipped=4.0
+2023-04-27 02:06:08,686 INFO [finetune.py:976] (0/7) Epoch 11, batch 4900, loss[loss=0.2014, simple_loss=0.2803, pruned_loss=0.06124, over 4887.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2584, pruned_loss=0.06131, over 956986.98 frames. ], batch size: 32, lr: 3.68e-03, grad_scale: 32.0
+2023-04-27 02:06:18,171 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62183.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:06:52,812 INFO [finetune.py:976] (0/7) Epoch 11, batch 4950, loss[loss=0.1535, simple_loss=0.2103, pruned_loss=0.04833, over 3754.00 frames. ], tot_loss[loss=0.191, simple_loss=0.259, pruned_loss=0.06147, over 954860.25 frames. ], batch size: 16, lr: 3.68e-03, grad_scale: 64.0
+2023-04-27 02:07:00,200 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0
+2023-04-27 02:07:01,575 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.624e+02 1.966e+02 2.483e+02 3.537e+02, threshold=3.932e+02, percent-clipped=0.0
+2023-04-27 02:07:21,452 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62270.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:07:23,318 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62273.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:07:26,151 INFO [finetune.py:976] (0/7) Epoch 11, batch 5000, loss[loss=0.1924, simple_loss=0.255, pruned_loss=0.06487, over 4832.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2556, pruned_loss=0.05993, over 954287.64 frames.
], batch size: 33, lr: 3.68e-03, grad_scale: 32.0 +2023-04-27 02:07:34,573 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3917, 1.0434, 1.1453, 1.1326, 1.5179, 1.3034, 1.0781, 1.0953], + device='cuda:0'), covar=tensor([0.1261, 0.1235, 0.1506, 0.1365, 0.0769, 0.1202, 0.1524, 0.1569], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0318, 0.0350, 0.0293, 0.0330, 0.0315, 0.0305, 0.0357], + device='cuda:0'), out_proj_covar=tensor([6.3586e-05, 6.7110e-05, 7.5235e-05, 6.0145e-05, 6.8833e-05, 6.7190e-05, + 6.5173e-05, 7.6637e-05], device='cuda:0') +2023-04-27 02:07:53,048 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5156, 2.6941, 2.1544, 2.3595, 2.7557, 2.2638, 3.6824, 2.1186], + device='cuda:0'), covar=tensor([0.4453, 0.2253, 0.5180, 0.3503, 0.2044, 0.3015, 0.1539, 0.4703], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0348, 0.0425, 0.0358, 0.0384, 0.0380, 0.0377, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 02:07:58,882 INFO [finetune.py:976] (0/7) Epoch 11, batch 5050, loss[loss=0.1657, simple_loss=0.2349, pruned_loss=0.04824, over 4928.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2537, pruned_loss=0.05988, over 954441.50 frames. ], batch size: 33, lr: 3.68e-03, grad_scale: 32.0 +2023-04-27 02:08:03,721 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62334.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:08:08,227 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.670e+02 2.040e+02 2.398e+02 4.126e+02, threshold=4.080e+02, percent-clipped=2.0 +2023-04-27 02:08:24,419 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-27 02:08:27,323 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1846, 1.5214, 1.4671, 1.7875, 1.6574, 1.9246, 1.3547, 3.2850], + device='cuda:0'), covar=tensor([0.0673, 0.0832, 0.0759, 0.1108, 0.0605, 0.0505, 0.0725, 0.0142], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0038, 0.0040, 0.0043, 0.0039, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 02:08:32,081 INFO [finetune.py:976] (0/7) Epoch 11, batch 5100, loss[loss=0.1858, simple_loss=0.2496, pruned_loss=0.061, over 4726.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2498, pruned_loss=0.05821, over 954383.31 frames. ], batch size: 23, lr: 3.68e-03, grad_scale: 32.0 +2023-04-27 02:08:39,907 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62388.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:08:42,198 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62391.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:08:47,533 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62399.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:09:06,023 INFO [finetune.py:976] (0/7) Epoch 11, batch 5150, loss[loss=0.2254, simple_loss=0.2914, pruned_loss=0.07966, over 4905.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2505, pruned_loss=0.05912, over 952539.72 frames. 
], batch size: 35, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:09:12,052 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=62436.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:09:14,304 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.7843, 4.7474, 3.1721, 5.4949, 4.7243, 4.7206, 2.6885, 4.7721], + device='cuda:0'), covar=tensor([0.1537, 0.1158, 0.3221, 0.0926, 0.4407, 0.1778, 0.4964, 0.2010], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0218, 0.0252, 0.0306, 0.0299, 0.0249, 0.0270, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 02:09:14,851 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.955e+01 1.646e+02 2.010e+02 2.557e+02 5.535e+02, threshold=4.020e+02, percent-clipped=1.0 +2023-04-27 02:09:25,276 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=62447.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:09:33,231 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62452.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:10:01,359 INFO [finetune.py:976] (0/7) Epoch 11, batch 5200, loss[loss=0.1646, simple_loss=0.2347, pruned_loss=0.04725, over 4768.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2543, pruned_loss=0.06048, over 951529.44 frames. ], batch size: 28, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:10:02,034 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62478.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:10:26,481 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5873, 1.2373, 4.3529, 4.0571, 3.8803, 4.1484, 4.0465, 3.8636], + device='cuda:0'), covar=tensor([0.6927, 0.6286, 0.0982, 0.1773, 0.1060, 0.1689, 0.1245, 0.1475], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0305, 0.0400, 0.0406, 0.0348, 0.0405, 0.0311, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 02:10:51,654 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.92 vs. limit=5.0 +2023-04-27 02:10:58,656 INFO [finetune.py:976] (0/7) Epoch 11, batch 5250, loss[loss=0.1933, simple_loss=0.249, pruned_loss=0.06885, over 4216.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2553, pruned_loss=0.06031, over 952036.79 frames. ], batch size: 18, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:11:07,064 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.221e+02 1.615e+02 2.039e+02 2.344e+02 5.619e+02, threshold=4.078e+02, percent-clipped=3.0 +2023-04-27 02:11:10,690 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62545.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:11:28,226 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62570.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:11:32,450 INFO [finetune.py:976] (0/7) Epoch 11, batch 5300, loss[loss=0.2232, simple_loss=0.2919, pruned_loss=0.07723, over 4751.00 frames. ], tot_loss[loss=0.1919, simple_loss=0.259, pruned_loss=0.06237, over 952971.00 frames. 
], batch size: 54, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:11:37,332 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62585.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:11:52,266 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62606.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:12:00,028 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=62618.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:12:03,578 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1515, 2.5140, 1.0308, 1.3928, 1.9837, 1.2407, 3.6018, 1.8205], + device='cuda:0'), covar=tensor([0.0633, 0.0757, 0.0811, 0.1298, 0.0502, 0.1003, 0.0274, 0.0601], + device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0067, 0.0049, 0.0047, 0.0051, 0.0053, 0.0077, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 02:12:05,914 INFO [finetune.py:976] (0/7) Epoch 11, batch 5350, loss[loss=0.1735, simple_loss=0.2434, pruned_loss=0.05186, over 4888.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2582, pruned_loss=0.06121, over 953093.17 frames. ], batch size: 32, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:12:07,195 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62629.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:12:13,875 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.227e+02 1.690e+02 2.017e+02 2.486e+02 5.260e+02, threshold=4.033e+02, percent-clipped=4.0 +2023-04-27 02:12:18,150 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62646.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:12:32,053 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62666.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:12:39,517 INFO [finetune.py:976] (0/7) Epoch 11, batch 5400, loss[loss=0.1973, simple_loss=0.2678, pruned_loss=0.06341, over 4811.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2551, pruned_loss=0.05985, over 950890.43 frames. ], batch size: 41, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:13:12,193 INFO [finetune.py:976] (0/7) Epoch 11, batch 5450, loss[loss=0.1908, simple_loss=0.2538, pruned_loss=0.06388, over 4859.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2525, pruned_loss=0.05938, over 952223.93 frames. 
], batch size: 49, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:13:12,309 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62727.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:13:20,546 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.017e+02 1.672e+02 1.919e+02 2.198e+02 3.769e+02, threshold=3.837e+02, percent-clipped=0.0 +2023-04-27 02:13:24,915 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62747.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:13:32,200 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0665, 0.7957, 0.9313, 0.7580, 1.2489, 1.0132, 0.8875, 0.9300], + device='cuda:0'), covar=tensor([0.1545, 0.1361, 0.1819, 0.1529, 0.0880, 0.1206, 0.1685, 0.2005], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0319, 0.0350, 0.0295, 0.0331, 0.0316, 0.0305, 0.0358], + device='cuda:0'), out_proj_covar=tensor([6.3838e-05, 6.7097e-05, 7.5322e-05, 6.0558e-05, 6.9040e-05, 6.7467e-05, + 6.5136e-05, 7.6817e-05], device='cuda:0') +2023-04-27 02:13:42,646 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=62773.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:13:45,485 INFO [finetune.py:976] (0/7) Epoch 11, batch 5500, loss[loss=0.1683, simple_loss=0.2329, pruned_loss=0.05189, over 4731.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2496, pruned_loss=0.05841, over 950209.24 frames. ], batch size: 54, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:13:46,157 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62778.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:14:17,877 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=62826.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:14:18,931 INFO [finetune.py:976] (0/7) Epoch 11, batch 5550, loss[loss=0.1412, simple_loss=0.2206, pruned_loss=0.0309, over 4796.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2508, pruned_loss=0.0589, over 951891.51 frames. 
], batch size: 25, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:14:20,266 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9075, 1.6006, 1.9336, 2.3366, 2.3347, 1.8529, 1.6182, 2.0056], + device='cuda:0'), covar=tensor([0.0834, 0.1113, 0.0632, 0.0495, 0.0592, 0.0842, 0.0797, 0.0608], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0204, 0.0184, 0.0175, 0.0179, 0.0189, 0.0159, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 02:14:23,729 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=62834.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:14:27,256 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.295e+02 1.658e+02 2.068e+02 2.664e+02 6.166e+02, threshold=4.137e+02, percent-clipped=3.0 +2023-04-27 02:14:53,931 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8385, 1.5008, 2.0629, 2.2335, 1.9307, 1.7818, 1.9234, 1.9257], + device='cuda:0'), covar=tensor([0.5409, 0.7849, 0.8398, 0.7182, 0.7018, 0.9509, 0.9943, 1.0182], + device='cuda:0'), in_proj_covar=tensor([0.0407, 0.0407, 0.0493, 0.0512, 0.0437, 0.0456, 0.0464, 0.0465], + device='cuda:0'), out_proj_covar=tensor([9.8881e-05, 1.0064e-04, 1.1131e-04, 1.2155e-04, 1.0603e-04, 1.1012e-04, + 1.1133e-04, 1.1156e-04], device='cuda:0') +2023-04-27 02:15:05,419 INFO [finetune.py:976] (0/7) Epoch 11, batch 5600, loss[loss=0.1578, simple_loss=0.2401, pruned_loss=0.03776, over 4825.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2555, pruned_loss=0.06034, over 953607.64 frames. ], batch size: 33, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:15:26,463 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-27 02:15:37,388 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62901.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:16:10,654 INFO [finetune.py:976] (0/7) Epoch 11, batch 5650, loss[loss=0.1612, simple_loss=0.2159, pruned_loss=0.05323, over 4203.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2585, pruned_loss=0.06112, over 954210.36 frames. ], batch size: 18, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:16:11,892 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=62929.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:16:20,613 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8199, 1.6347, 2.0545, 2.2833, 1.9432, 1.7804, 1.8957, 1.8689], + device='cuda:0'), covar=tensor([0.5736, 0.8208, 0.8755, 0.7000, 0.6924, 1.0136, 1.0921, 1.0388], + device='cuda:0'), in_proj_covar=tensor([0.0406, 0.0406, 0.0492, 0.0510, 0.0436, 0.0456, 0.0464, 0.0465], + device='cuda:0'), out_proj_covar=tensor([9.8794e-05, 1.0058e-04, 1.1109e-04, 1.2131e-04, 1.0577e-04, 1.1020e-04, + 1.1124e-04, 1.1147e-04], device='cuda:0') +2023-04-27 02:16:23,344 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.567e+02 1.821e+02 2.303e+02 3.535e+02, threshold=3.642e+02, percent-clipped=0.0 +2023-04-27 02:16:23,980 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=62941.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:16:43,543 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-27 02:17:06,579 INFO [finetune.py:976] (0/7) Epoch 11, batch 5700, loss[loss=0.1723, simple_loss=0.2333, pruned_loss=0.05565, over 4254.00 frames. 
], tot_loss[loss=0.1881, simple_loss=0.2549, pruned_loss=0.06066, over 936821.65 frames. ], batch size: 18, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:17:06,612 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=62977.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:17:23,662 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-11.pt +2023-04-27 02:17:38,294 INFO [finetune.py:976] (0/7) Epoch 12, batch 0, loss[loss=0.2104, simple_loss=0.2745, pruned_loss=0.07314, over 4890.00 frames. ], tot_loss[loss=0.2104, simple_loss=0.2745, pruned_loss=0.07314, over 4890.00 frames. ], batch size: 37, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:17:38,295 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 02:17:43,616 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7147, 1.5751, 1.8190, 2.0899, 2.1637, 1.7323, 1.4210, 1.9677], + device='cuda:0'), covar=tensor([0.0844, 0.1107, 0.0744, 0.0577, 0.0539, 0.0853, 0.0847, 0.0540], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0204, 0.0183, 0.0175, 0.0179, 0.0189, 0.0159, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 02:17:46,949 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1978, 1.5041, 1.6052, 1.7925, 1.7274, 1.8871, 1.6513, 1.6742], + device='cuda:0'), covar=tensor([0.4117, 0.5798, 0.5746, 0.4774, 0.6098, 0.7783, 0.6421, 0.5675], + device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0379, 0.0315, 0.0327, 0.0339, 0.0400, 0.0359, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 02:17:54,892 INFO [finetune.py:1010] (0/7) Epoch 12, validation: loss=0.1544, simple_loss=0.2267, pruned_loss=0.04099, over 2265189.00 frames. +2023-04-27 02:17:55,897 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 02:18:07,739 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.24 vs. limit=5.0 +2023-04-27 02:18:18,013 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63022.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:18:32,221 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9403, 1.7312, 2.0795, 2.3355, 2.4304, 1.7846, 1.6182, 2.0801], + device='cuda:0'), covar=tensor([0.0926, 0.1102, 0.0660, 0.0538, 0.0568, 0.1057, 0.0881, 0.0615], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0204, 0.0183, 0.0175, 0.0179, 0.0189, 0.0159, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 02:18:39,643 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.931e+01 1.655e+02 2.029e+02 2.560e+02 6.942e+02, threshold=4.058e+02, percent-clipped=5.0 +2023-04-27 02:18:44,483 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63047.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:18:54,573 INFO [finetune.py:976] (0/7) Epoch 12, batch 50, loss[loss=0.2458, simple_loss=0.296, pruned_loss=0.09785, over 4901.00 frames. ], tot_loss[loss=0.1978, simple_loss=0.2645, pruned_loss=0.06559, over 217242.48 frames. 
], batch size: 36, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:19:30,952 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=63095.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:19:37,863 INFO [finetune.py:976] (0/7) Epoch 12, batch 100, loss[loss=0.1441, simple_loss=0.2165, pruned_loss=0.03584, over 4824.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.256, pruned_loss=0.06136, over 381631.95 frames. ], batch size: 33, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:19:54,524 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63129.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:20:01,187 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.674e+02 1.936e+02 2.495e+02 3.786e+02, threshold=3.872e+02, percent-clipped=0.0 +2023-04-27 02:20:06,715 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-27 02:20:10,031 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1897, 1.6134, 1.4144, 1.8969, 1.7940, 2.0719, 1.4699, 3.4752], + device='cuda:0'), covar=tensor([0.0642, 0.0764, 0.0789, 0.1042, 0.0580, 0.0464, 0.0722, 0.0162], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0038, 0.0040, 0.0043, 0.0039, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 02:20:11,745 INFO [finetune.py:976] (0/7) Epoch 12, batch 150, loss[loss=0.1304, simple_loss=0.1985, pruned_loss=0.03116, over 4757.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2528, pruned_loss=0.06139, over 510533.82 frames. ], batch size: 28, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:20:53,021 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63201.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:21:01,803 INFO [finetune.py:976] (0/7) Epoch 12, batch 200, loss[loss=0.1757, simple_loss=0.2425, pruned_loss=0.05451, over 4746.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2501, pruned_loss=0.06019, over 609647.82 frames. ], batch size: 26, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:21:09,012 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-27 02:21:13,186 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63221.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:21:24,494 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.629e+02 1.966e+02 2.303e+02 3.666e+02, threshold=3.932e+02, percent-clipped=0.0 +2023-04-27 02:21:25,222 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63241.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:21:30,504 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=63249.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:21:34,067 INFO [finetune.py:976] (0/7) Epoch 12, batch 250, loss[loss=0.1576, simple_loss=0.2234, pruned_loss=0.04587, over 4811.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2534, pruned_loss=0.06094, over 686427.95 frames. ], batch size: 25, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:21:52,390 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63282.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:21:53,193 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. 
limit=2.0 +2023-04-27 02:21:56,552 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=63289.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:22:12,122 INFO [finetune.py:976] (0/7) Epoch 12, batch 300, loss[loss=0.1891, simple_loss=0.2699, pruned_loss=0.05413, over 4932.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2572, pruned_loss=0.06119, over 748456.58 frames. ], batch size: 38, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:22:21,941 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0216, 2.6562, 1.9365, 2.0352, 1.4560, 1.4250, 2.0121, 1.3190], + device='cuda:0'), covar=tensor([0.1839, 0.1690, 0.1568, 0.1929, 0.2612, 0.2161, 0.1152, 0.2231], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0214, 0.0169, 0.0204, 0.0203, 0.0185, 0.0159, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 02:22:36,082 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63322.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:22:46,894 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.130e+02 1.771e+02 2.081e+02 2.585e+02 5.314e+02, threshold=4.161e+02, percent-clipped=4.0 +2023-04-27 02:23:01,682 INFO [finetune.py:976] (0/7) Epoch 12, batch 350, loss[loss=0.1629, simple_loss=0.2369, pruned_loss=0.04443, over 4753.00 frames. ], tot_loss[loss=0.1897, simple_loss=0.2577, pruned_loss=0.06081, over 795583.35 frames. ], batch size: 27, lr: 3.67e-03, grad_scale: 32.0 +2023-04-27 02:23:18,997 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=63370.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:23:57,901 INFO [finetune.py:976] (0/7) Epoch 12, batch 400, loss[loss=0.1964, simple_loss=0.2592, pruned_loss=0.06679, over 4816.00 frames. ], tot_loss[loss=0.1892, simple_loss=0.2574, pruned_loss=0.06053, over 830929.10 frames. ], batch size: 33, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:24:18,822 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63420.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:24:20,519 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63422.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:24:31,399 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63429.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:24:43,211 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.266e+02 1.590e+02 1.875e+02 2.265e+02 5.610e+02, threshold=3.751e+02, percent-clipped=2.0 +2023-04-27 02:25:03,610 INFO [finetune.py:976] (0/7) Epoch 12, batch 450, loss[loss=0.1885, simple_loss=0.2471, pruned_loss=0.06495, over 4706.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2559, pruned_loss=0.05978, over 859289.58 frames. 
], batch size: 23, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:25:05,584 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0713, 1.0279, 1.2660, 1.2135, 0.9987, 0.8879, 1.0718, 0.5678], + device='cuda:0'), covar=tensor([0.0572, 0.0718, 0.0534, 0.0549, 0.0659, 0.1308, 0.0456, 0.0878], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0071, 0.0070, 0.0067, 0.0075, 0.0096, 0.0076, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 02:25:38,226 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=63477.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:25:46,535 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63481.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:25:47,765 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63483.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:26:12,258 INFO [finetune.py:976] (0/7) Epoch 12, batch 500, loss[loss=0.1986, simple_loss=0.2688, pruned_loss=0.06418, over 4902.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2547, pruned_loss=0.05971, over 882653.62 frames. ], batch size: 43, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:26:34,839 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9784, 0.5215, 0.8609, 0.6438, 1.1586, 0.8899, 0.7246, 0.8596], + device='cuda:0'), covar=tensor([0.2025, 0.1867, 0.2393, 0.1871, 0.1377, 0.1698, 0.2139, 0.2551], + device='cuda:0'), in_proj_covar=tensor([0.0304, 0.0317, 0.0349, 0.0294, 0.0329, 0.0314, 0.0304, 0.0357], + device='cuda:0'), out_proj_covar=tensor([6.3747e-05, 6.6725e-05, 7.5047e-05, 6.0479e-05, 6.8691e-05, 6.7006e-05, + 6.4890e-05, 7.6584e-05], device='cuda:0') +2023-04-27 02:26:41,240 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.737e+01 1.732e+02 1.941e+02 2.281e+02 3.295e+02, threshold=3.881e+02, percent-clipped=0.0 +2023-04-27 02:26:50,384 INFO [finetune.py:976] (0/7) Epoch 12, batch 550, loss[loss=0.1621, simple_loss=0.2245, pruned_loss=0.04989, over 4830.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2508, pruned_loss=0.05825, over 899557.23 frames. ], batch size: 33, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:26:59,494 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63569.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:27:05,799 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63577.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:27:12,218 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63586.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:27:15,934 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-27 02:27:23,277 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5221, 1.3123, 1.6976, 1.7036, 1.3640, 1.2177, 1.4294, 0.9114], + device='cuda:0'), covar=tensor([0.0573, 0.0963, 0.0503, 0.0843, 0.0907, 0.1272, 0.0768, 0.0773], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0072, 0.0070, 0.0067, 0.0075, 0.0097, 0.0076, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 02:27:23,755 INFO [finetune.py:976] (0/7) Epoch 12, batch 600, loss[loss=0.1782, simple_loss=0.244, pruned_loss=0.05613, over 4915.00 frames. 
], tot_loss[loss=0.1835, simple_loss=0.2507, pruned_loss=0.0582, over 911968.79 frames. ], batch size: 36, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:27:28,285 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-27 02:27:38,787 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5294, 1.3045, 1.7228, 1.6737, 1.3119, 1.2180, 1.3916, 0.9074], + device='cuda:0'), covar=tensor([0.0607, 0.0907, 0.0486, 0.0693, 0.0858, 0.1291, 0.0602, 0.0754], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0072, 0.0070, 0.0067, 0.0076, 0.0097, 0.0076, 0.0071], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 02:27:41,100 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63630.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:27:48,533 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.750e+02 2.019e+02 2.576e+02 5.185e+02, threshold=4.039e+02, percent-clipped=2.0 +2023-04-27 02:27:52,935 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63647.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:27:57,754 INFO [finetune.py:976] (0/7) Epoch 12, batch 650, loss[loss=0.2035, simple_loss=0.29, pruned_loss=0.05856, over 4826.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2547, pruned_loss=0.05962, over 922879.86 frames. ], batch size: 40, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:28:00,972 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-27 02:28:42,664 INFO [finetune.py:976] (0/7) Epoch 12, batch 700, loss[loss=0.184, simple_loss=0.2608, pruned_loss=0.05359, over 4808.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2562, pruned_loss=0.05964, over 928846.56 frames. ], batch size: 40, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:29:23,284 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.645e+02 1.982e+02 2.413e+02 5.979e+02, threshold=3.963e+02, percent-clipped=2.0 +2023-04-27 02:29:26,940 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7932, 1.2661, 4.8179, 4.5367, 4.2396, 4.5647, 4.3459, 4.2796], + device='cuda:0'), covar=tensor([0.7149, 0.6098, 0.1052, 0.1708, 0.1021, 0.1120, 0.1701, 0.1557], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0303, 0.0401, 0.0405, 0.0348, 0.0404, 0.0313, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 02:29:32,955 INFO [finetune.py:976] (0/7) Epoch 12, batch 750, loss[loss=0.2179, simple_loss=0.2865, pruned_loss=0.07467, over 4824.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2574, pruned_loss=0.06022, over 935494.65 frames. ], batch size: 30, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:29:45,876 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63776.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:29:46,646 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-27 02:29:47,599 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63778.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:30:06,645 INFO [finetune.py:976] (0/7) Epoch 12, batch 800, loss[loss=0.2342, simple_loss=0.2865, pruned_loss=0.09096, over 4815.00 frames. 
], tot_loss[loss=0.1897, simple_loss=0.2586, pruned_loss=0.06044, over 940476.28 frames. ], batch size: 40, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:30:09,844 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63810.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:30:37,941 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7568, 2.0086, 1.0722, 1.3915, 2.1878, 1.6502, 1.4468, 1.5822], + device='cuda:0'), covar=tensor([0.0523, 0.0368, 0.0358, 0.0591, 0.0243, 0.0568, 0.0555, 0.0625], + device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0025, 0.0023, 0.0030, 0.0020, 0.0029, 0.0029, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0050, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 02:30:40,780 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.251e+02 1.671e+02 1.936e+02 2.427e+02 3.896e+02, threshold=3.873e+02, percent-clipped=0.0 +2023-04-27 02:30:49,737 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63845.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:30:52,160 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3341, 3.0097, 1.0103, 1.8076, 1.7405, 2.1665, 1.7639, 0.9631], + device='cuda:0'), covar=tensor([0.1431, 0.1109, 0.1776, 0.1253, 0.1071, 0.1009, 0.1504, 0.1874], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0248, 0.0141, 0.0122, 0.0134, 0.0153, 0.0119, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 02:31:01,220 INFO [finetune.py:976] (0/7) Epoch 12, batch 850, loss[loss=0.1889, simple_loss=0.2544, pruned_loss=0.06169, over 4906.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2562, pruned_loss=0.06017, over 942664.31 frames. ], batch size: 37, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:31:03,800 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3115, 1.7121, 2.1136, 2.6268, 2.0960, 1.6942, 1.3315, 1.8899], + device='cuda:0'), covar=tensor([0.3651, 0.3470, 0.1776, 0.2510, 0.3097, 0.2991, 0.4710, 0.2527], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0250, 0.0223, 0.0316, 0.0215, 0.0229, 0.0231, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 02:31:21,233 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63871.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:31:24,876 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=63877.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:31:33,724 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2657, 1.5656, 1.3941, 1.8525, 1.7730, 1.9863, 1.3783, 3.6776], + device='cuda:0'), covar=tensor([0.0644, 0.0830, 0.0863, 0.1209, 0.0624, 0.0489, 0.0782, 0.0187], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 02:32:06,882 INFO [finetune.py:976] (0/7) Epoch 12, batch 900, loss[loss=0.1822, simple_loss=0.2541, pruned_loss=0.05512, over 4850.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.252, pruned_loss=0.0583, over 944479.84 frames. 
], batch size: 33, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:32:07,616 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63906.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:32:19,238 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=63916.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:32:24,717 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=63925.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:32:24,736 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63925.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:32:34,606 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.028e+02 1.661e+02 1.966e+02 2.302e+02 4.372e+02, threshold=3.933e+02, percent-clipped=1.0 +2023-04-27 02:32:36,450 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=63942.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:32:45,688 INFO [finetune.py:976] (0/7) Epoch 12, batch 950, loss[loss=0.2024, simple_loss=0.2669, pruned_loss=0.06897, over 4935.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2508, pruned_loss=0.05793, over 946911.15 frames. ], batch size: 38, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:32:57,816 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3568, 3.0465, 2.5391, 2.6655, 2.0017, 2.7299, 2.6497, 2.1384], + device='cuda:0'), covar=tensor([0.2208, 0.1602, 0.0766, 0.1228, 0.3450, 0.1022, 0.1966, 0.2333], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0314, 0.0226, 0.0283, 0.0312, 0.0268, 0.0253, 0.0274], + device='cuda:0'), out_proj_covar=tensor([1.1835e-04, 1.2614e-04, 9.0324e-05, 1.1329e-04, 1.2730e-04, 1.0747e-04, + 1.0299e-04, 1.0970e-04], device='cuda:0') +2023-04-27 02:32:59,628 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=63977.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:33:16,569 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-64000.pt +2023-04-27 02:33:20,846 INFO [finetune.py:976] (0/7) Epoch 12, batch 1000, loss[loss=0.2339, simple_loss=0.2999, pruned_loss=0.08399, over 4827.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2535, pruned_loss=0.05929, over 949784.59 frames. ], batch size: 33, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:33:32,468 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64023.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:33:43,219 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.750e+01 1.665e+02 1.998e+02 2.380e+02 4.049e+02, threshold=3.995e+02, percent-clipped=1.0 +2023-04-27 02:33:46,811 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9108, 2.0507, 0.7910, 1.1217, 1.5848, 1.1343, 2.3872, 1.3148], + device='cuda:0'), covar=tensor([0.0630, 0.0622, 0.0718, 0.1315, 0.0436, 0.0993, 0.0380, 0.0779], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0051, 0.0052, 0.0077, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 02:33:59,166 INFO [finetune.py:976] (0/7) Epoch 12, batch 1050, loss[loss=0.17, simple_loss=0.2375, pruned_loss=0.05131, over 4837.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2564, pruned_loss=0.06019, over 952413.59 frames. 
], batch size: 25, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:34:29,314 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64076.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:34:30,589 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64078.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:34:40,101 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64084.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:34:51,945 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7672, 1.9872, 1.6856, 1.8316, 1.3309, 1.6390, 1.7648, 1.3533], + device='cuda:0'), covar=tensor([0.1600, 0.1112, 0.0962, 0.1084, 0.3696, 0.1079, 0.1468, 0.2217], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0315, 0.0226, 0.0283, 0.0312, 0.0268, 0.0254, 0.0275], + device='cuda:0'), out_proj_covar=tensor([1.1879e-04, 1.2615e-04, 9.0257e-05, 1.1319e-04, 1.2751e-04, 1.0743e-04, + 1.0323e-04, 1.1011e-04], device='cuda:0') +2023-04-27 02:35:05,822 INFO [finetune.py:976] (0/7) Epoch 12, batch 1100, loss[loss=0.2268, simple_loss=0.2938, pruned_loss=0.0799, over 4858.00 frames. ], tot_loss[loss=0.19, simple_loss=0.2577, pruned_loss=0.06111, over 950703.53 frames. ], batch size: 31, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:35:28,496 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=64124.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:35:29,711 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=64126.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:35:38,340 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.705e+02 2.099e+02 2.623e+02 4.712e+02, threshold=4.198e+02, percent-clipped=5.0 +2023-04-27 02:35:49,424 INFO [finetune.py:976] (0/7) Epoch 12, batch 1150, loss[loss=0.1859, simple_loss=0.2656, pruned_loss=0.05308, over 4875.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2582, pruned_loss=0.06123, over 951174.03 frames. ], batch size: 32, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:35:57,667 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64166.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:36:19,852 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64201.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:36:19,959 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-27 02:36:22,239 INFO [finetune.py:976] (0/7) Epoch 12, batch 1200, loss[loss=0.2059, simple_loss=0.2602, pruned_loss=0.07577, over 4827.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2566, pruned_loss=0.06037, over 953251.65 frames. 
], batch size: 25, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:36:36,042 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64225.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:36:40,396 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64232.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:36:50,582 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.610e+02 1.933e+02 2.309e+02 4.139e+02, threshold=3.865e+02, percent-clipped=0.0 +2023-04-27 02:36:51,903 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64242.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:37:10,837 INFO [finetune.py:976] (0/7) Epoch 12, batch 1250, loss[loss=0.1594, simple_loss=0.231, pruned_loss=0.04389, over 4819.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2538, pruned_loss=0.05973, over 951990.85 frames. ], batch size: 40, lr: 3.66e-03, grad_scale: 64.0 +2023-04-27 02:37:12,795 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7550, 2.4009, 1.6734, 1.6432, 1.2566, 1.2936, 1.7630, 1.1647], + device='cuda:0'), covar=tensor([0.1586, 0.1185, 0.1470, 0.1833, 0.2297, 0.1914, 0.0994, 0.2074], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0213, 0.0169, 0.0204, 0.0201, 0.0183, 0.0157, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 02:37:33,238 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64272.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:37:33,823 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=64273.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:37:53,853 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.75 vs. limit=5.0 +2023-04-27 02:37:55,440 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=64290.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:37:57,339 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64293.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:38:16,202 INFO [finetune.py:976] (0/7) Epoch 12, batch 1300, loss[loss=0.1515, simple_loss=0.2129, pruned_loss=0.04505, over 4774.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2518, pruned_loss=0.05905, over 953000.96 frames. ], batch size: 26, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:38:40,958 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.564e+02 1.871e+02 2.311e+02 4.814e+02, threshold=3.742e+02, percent-clipped=4.0 +2023-04-27 02:38:43,500 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64345.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 02:38:49,942 INFO [finetune.py:976] (0/7) Epoch 12, batch 1350, loss[loss=0.2241, simple_loss=0.2807, pruned_loss=0.08376, over 4894.00 frames. ], tot_loss[loss=0.185, simple_loss=0.252, pruned_loss=0.05901, over 954879.54 frames. ], batch size: 32, lr: 3.66e-03, grad_scale: 32.0 +2023-04-27 02:38:50,775 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. 
limit=2.0
+2023-04-27 02:39:07,138 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64379.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:39:18,747 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5743, 1.3006, 1.7641, 1.8841, 1.6389, 1.6125, 1.6958, 1.6896],
+       device='cuda:0'), covar=tensor([0.6723, 0.8763, 0.9490, 1.0566, 0.8166, 1.1182, 1.0700, 1.1170],
+       device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0410, 0.0498, 0.0517, 0.0443, 0.0463, 0.0469, 0.0471],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 02:39:22,954 INFO [finetune.py:976] (0/7) Epoch 12, batch 1400, loss[loss=0.2717, simple_loss=0.3334, pruned_loss=0.105, over 4257.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.256, pruned_loss=0.06015, over 954542.76 frames. ], batch size: 65, lr: 3.66e-03, grad_scale: 32.0
+2023-04-27 02:39:23,728 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64406.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:39:38,059 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8920, 1.6570, 2.0924, 2.2870, 1.9429, 1.8753, 1.9648, 1.9669],
+       device='cuda:0'), covar=tensor([0.5444, 0.7666, 0.8290, 0.7026, 0.6829, 1.0257, 0.9688, 1.0224],
+       device='cuda:0'), in_proj_covar=tensor([0.0411, 0.0410, 0.0498, 0.0516, 0.0442, 0.0462, 0.0469, 0.0471],
+       device='cuda:0'), out_proj_covar=tensor([9.9960e-05, 1.0160e-04, 1.1232e-04, 1.2241e-04, 1.0705e-04, 1.1157e-04,
+       1.1232e-04, 1.1273e-04], device='cuda:0')
+2023-04-27 02:39:48,147 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.746e+02 2.220e+02 2.632e+02 4.800e+02, threshold=4.440e+02, percent-clipped=5.0
+2023-04-27 02:39:50,152 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0
+2023-04-27 02:40:07,659 INFO [finetune.py:976] (0/7) Epoch 12, batch 1450, loss[loss=0.1799, simple_loss=0.2473, pruned_loss=0.05626, over 4906.00 frames. ], tot_loss[loss=0.1891, simple_loss=0.257, pruned_loss=0.06065, over 954409.50 frames. ], batch size: 35, lr: 3.66e-03, grad_scale: 32.0
+2023-04-27 02:40:21,222 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64465.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:40:21,828 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64466.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:40:55,246 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64501.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:40:57,604 INFO [finetune.py:976] (0/7) Epoch 12, batch 1500, loss[loss=0.2511, simple_loss=0.3037, pruned_loss=0.09926, over 4836.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2583, pruned_loss=0.06134, over 954612.63 frames. ], batch size: 49, lr: 3.66e-03, grad_scale: 32.0
+2023-04-27 02:41:03,987 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=64514.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:41:12,848 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64526.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:41:16,505 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2795, 1.4616, 1.4516, 1.7167, 1.5734, 1.8239, 1.3209, 3.3680],
+       device='cuda:0'), covar=tensor([0.0632, 0.0911, 0.0900, 0.1322, 0.0757, 0.0542, 0.0897, 0.0199],
+       device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056],
+       device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0016],
+       device='cuda:0')
+2023-04-27 02:41:22,337 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.091e+02 1.729e+02 2.014e+02 2.511e+02 4.004e+02, threshold=4.028e+02, percent-clipped=0.0
+2023-04-27 02:41:27,205 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=64549.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:41:30,769 INFO [finetune.py:976] (0/7) Epoch 12, batch 1550, loss[loss=0.1781, simple_loss=0.2415, pruned_loss=0.05735, over 4824.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2586, pruned_loss=0.06113, over 955494.77 frames. ], batch size: 38, lr: 3.66e-03, grad_scale: 32.0
+2023-04-27 02:41:42,610 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64572.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:41:53,767 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64588.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:41:54,940 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64589.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:42:10,373 INFO [finetune.py:976] (0/7) Epoch 12, batch 1600, loss[loss=0.2193, simple_loss=0.2727, pruned_loss=0.08298, over 4694.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.255, pruned_loss=0.05958, over 954088.94 frames. ], batch size: 59, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:42:28,461 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0086, 1.0124, 1.2485, 1.1579, 0.9929, 0.8970, 1.0202, 0.6791],
+       device='cuda:0'), covar=tensor([0.0636, 0.0668, 0.0597, 0.0541, 0.0758, 0.1290, 0.0493, 0.0833],
+       device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0072, 0.0071, 0.0067, 0.0076, 0.0097, 0.0076, 0.0071],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-27 02:42:31,725 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=64620.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:42:46,405 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.047e+02 1.700e+02 1.959e+02 2.289e+02 5.207e+02, threshold=3.917e+02, percent-clipped=1.0
+2023-04-27 02:42:56,554 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64648.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:42:57,775 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64650.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:43:06,461 INFO [finetune.py:976] (0/7) Epoch 12, batch 1650, loss[loss=0.1873, simple_loss=0.2501, pruned_loss=0.06221, over 4830.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2525, pruned_loss=0.05909, over 953557.44 frames. ], batch size: 38, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:43:20,845 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0109, 1.5522, 1.4270, 1.7168, 1.6443, 1.6742, 1.3810, 2.9676],
+       device='cuda:0'), covar=tensor([0.0702, 0.0799, 0.0745, 0.1176, 0.0601, 0.0545, 0.0726, 0.0175],
+       device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0057],
+       device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+       device='cuda:0')
+2023-04-27 02:43:36,853 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64679.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:44:04,111 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64701.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:44:11,237 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64703.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:44:12,323 INFO [finetune.py:976] (0/7) Epoch 12, batch 1700, loss[loss=0.2304, simple_loss=0.297, pruned_loss=0.0819, over 4817.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2508, pruned_loss=0.05835, over 954965.07 frames. ], batch size: 39, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:44:14,892 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64709.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:44:27,398 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=64727.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:44:37,467 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.591e+02 1.842e+02 2.298e+02 5.493e+02, threshold=3.684e+02, percent-clipped=1.0
+2023-04-27 02:44:37,601 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0946, 2.0086, 1.7239, 1.7379, 2.1948, 1.7777, 2.6595, 1.4650],
+       device='cuda:0'), covar=tensor([0.3822, 0.1782, 0.4770, 0.3096, 0.1629, 0.2313, 0.1456, 0.4486],
+       device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0348, 0.0431, 0.0360, 0.0386, 0.0383, 0.0376, 0.0422],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 02:44:40,046 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64745.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:44:44,757 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64752.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:44:46,473 INFO [finetune.py:976] (0/7) Epoch 12, batch 1750, loss[loss=0.2472, simple_loss=0.3218, pruned_loss=0.08625, over 4838.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2536, pruned_loss=0.05963, over 954310.05 frames. ], batch size: 47, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:44:52,001 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64764.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:45:11,856 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6191, 1.7924, 1.4178, 1.0470, 1.2000, 1.1715, 1.4226, 1.1544],
+       device='cuda:0'), covar=tensor([0.1609, 0.1320, 0.1565, 0.1968, 0.2347, 0.2025, 0.1056, 0.2052],
+       device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0214, 0.0169, 0.0204, 0.0201, 0.0183, 0.0157, 0.0186],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+       device='cuda:0')
+2023-04-27 02:45:20,205 INFO [finetune.py:976] (0/7) Epoch 12, batch 1800, loss[loss=0.2249, simple_loss=0.2977, pruned_loss=0.07602, over 4831.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2555, pruned_loss=0.05976, over 954336.91 frames. ], batch size: 47, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:45:20,912 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64806.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:45:25,147 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=64813.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:45:29,901 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64821.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:45:37,631 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1106, 2.5972, 2.2904, 2.4116, 1.8495, 2.0627, 2.1647, 1.7800],
+       device='cuda:0'), covar=tensor([0.1952, 0.1321, 0.0765, 0.1127, 0.3002, 0.1239, 0.1843, 0.2636],
+       device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0310, 0.0223, 0.0279, 0.0309, 0.0264, 0.0250, 0.0271],
+       device='cuda:0'), out_proj_covar=tensor([1.1675e-04, 1.2444e-04, 8.9113e-05, 1.1149e-04, 1.2608e-04, 1.0600e-04,
+       1.0180e-04, 1.0846e-04], device='cuda:0')
+2023-04-27 02:45:40,620 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2368, 1.5564, 1.7111, 1.8278, 1.6376, 1.7356, 1.7416, 1.7224],
+       device='cuda:0'), covar=tensor([0.5005, 0.6536, 0.5219, 0.4861, 0.6483, 0.8950, 0.6577, 0.5985],
+       device='cuda:0'), in_proj_covar=tensor([0.0328, 0.0375, 0.0315, 0.0326, 0.0338, 0.0397, 0.0358, 0.0324],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-04-27 02:46:00,004 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.632e+02 2.043e+02 2.586e+02 5.836e+02, threshold=4.087e+02, percent-clipped=7.0
+2023-04-27 02:46:21,073 INFO [finetune.py:976] (0/7) Epoch 12, batch 1850, loss[loss=0.2266, simple_loss=0.2839, pruned_loss=0.08459, over 4814.00 frames. ], tot_loss[loss=0.1885, simple_loss=0.2567, pruned_loss=0.06013, over 954368.17 frames. ], batch size: 40, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:46:58,538 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=64888.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:47:10,166 INFO [finetune.py:976] (0/7) Epoch 12, batch 1900, loss[loss=0.1574, simple_loss=0.2306, pruned_loss=0.04205, over 4788.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2572, pruned_loss=0.06012, over 952870.34 frames. ], batch size: 51, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:47:18,587 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2418, 1.6491, 2.1345, 2.6529, 2.1721, 1.6900, 1.4732, 1.9744],
+       device='cuda:0'), covar=tensor([0.3044, 0.3285, 0.1549, 0.2311, 0.2669, 0.2573, 0.4300, 0.2199],
+       device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0248, 0.0222, 0.0315, 0.0213, 0.0228, 0.0230, 0.0186],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:0')
+2023-04-27 02:47:21,147 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-27 02:47:24,063 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3009, 2.0491, 2.4277, 2.6965, 2.7503, 2.1538, 1.8891, 2.4398],
+       device='cuda:0'), covar=tensor([0.0832, 0.0990, 0.0528, 0.0525, 0.0563, 0.0847, 0.0850, 0.0505],
+       device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0201, 0.0181, 0.0173, 0.0177, 0.0187, 0.0158, 0.0181],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 02:47:35,069 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=64936.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:47:39,044 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.650e+02 1.962e+02 2.439e+02 4.068e+02, threshold=3.925e+02, percent-clipped=0.0
+2023-04-27 02:47:48,347 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=64945.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:48:00,716 INFO [finetune.py:976] (0/7) Epoch 12, batch 1950, loss[loss=0.1651, simple_loss=0.2353, pruned_loss=0.04748, over 4907.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2557, pruned_loss=0.05972, over 954584.05 frames. ], batch size: 37, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:48:31,516 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=64976.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:48:49,856 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65001.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:48:51,661 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65004.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:48:52,188 INFO [finetune.py:976] (0/7) Epoch 12, batch 2000, loss[loss=0.1577, simple_loss=0.2334, pruned_loss=0.04095, over 4848.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2524, pruned_loss=0.05819, over 953833.49 frames. ], batch size: 47, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:49:12,502 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65037.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 02:49:15,251 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.562e+02 1.931e+02 2.366e+02 5.248e+02, threshold=3.862e+02, percent-clipped=2.0
+2023-04-27 02:49:21,187 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65049.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:49:26,343 INFO [finetune.py:976] (0/7) Epoch 12, batch 2050, loss[loss=0.1794, simple_loss=0.2564, pruned_loss=0.05121, over 4794.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2499, pruned_loss=0.05794, over 954098.51 frames. ], batch size: 51, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:49:26,439 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65055.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:49:28,832 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65059.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:49:53,699 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9290, 1.3342, 5.1741, 4.8332, 4.5914, 5.0262, 4.5719, 4.6454],
+       device='cuda:0'), covar=tensor([0.6776, 0.6528, 0.0979, 0.1787, 0.0994, 0.1627, 0.1563, 0.1590],
+       device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0300, 0.0395, 0.0400, 0.0342, 0.0399, 0.0310, 0.0362],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 02:49:56,663 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65101.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:50:00,020 INFO [finetune.py:976] (0/7) Epoch 12, batch 2100, loss[loss=0.1766, simple_loss=0.2311, pruned_loss=0.06108, over 4229.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.25, pruned_loss=0.05841, over 953204.39 frames. ], batch size: 18, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:50:01,907 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65108.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:50:06,862 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65116.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:50:10,318 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65121.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:50:22,438 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.293e+02 1.658e+02 1.987e+02 2.538e+02 5.198e+02, threshold=3.973e+02, percent-clipped=2.0
+2023-04-27 02:50:33,407 INFO [finetune.py:976] (0/7) Epoch 12, batch 2150, loss[loss=0.1831, simple_loss=0.2547, pruned_loss=0.05572, over 4835.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2535, pruned_loss=0.05936, over 953806.09 frames. ], batch size: 30, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:50:36,578 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5754, 1.1364, 1.6449, 1.9858, 1.6465, 1.5207, 1.5946, 1.6488],
+       device='cuda:0'), covar=tensor([0.5840, 0.8472, 0.8576, 0.8085, 0.7405, 1.0051, 0.9935, 0.9898],
+       device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0407, 0.0498, 0.0515, 0.0441, 0.0461, 0.0467, 0.0470],
+       device='cuda:0'), out_proj_covar=tensor([9.9474e-05, 1.0092e-04, 1.1221e-04, 1.2217e-04, 1.0686e-04, 1.1122e-04,
+       1.1184e-04, 1.1251e-04], device='cuda:0')
+2023-04-27 02:50:42,397 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65169.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:51:05,956 INFO [finetune.py:976] (0/7) Epoch 12, batch 2200, loss[loss=0.1813, simple_loss=0.2574, pruned_loss=0.05262, over 4923.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2567, pruned_loss=0.0601, over 954416.24 frames. ], batch size: 29, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:51:56,237 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.743e+02 2.057e+02 2.570e+02 5.251e+02, threshold=4.115e+02, percent-clipped=3.0
+2023-04-27 02:51:58,818 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65245.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:52:11,636 INFO [finetune.py:976] (0/7) Epoch 12, batch 2250, loss[loss=0.1803, simple_loss=0.2499, pruned_loss=0.05537, over 4752.00 frames. ], tot_loss[loss=0.1895, simple_loss=0.2582, pruned_loss=0.06042, over 955328.67 frames. ], batch size: 27, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:52:39,970 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4878, 1.2955, 1.6954, 1.6948, 1.3151, 1.2232, 1.3666, 0.8262],
+       device='cuda:0'), covar=tensor([0.0631, 0.0838, 0.0484, 0.0700, 0.0894, 0.1297, 0.0651, 0.0822],
+       device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0072, 0.0071, 0.0068, 0.0076, 0.0097, 0.0076, 0.0071],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-27 02:52:53,423 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65285.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:52:53,570 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0
+2023-04-27 02:53:02,684 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0
+2023-04-27 02:53:03,640 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65293.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:53:21,378 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65304.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:53:21,920 INFO [finetune.py:976] (0/7) Epoch 12, batch 2300, loss[loss=0.1814, simple_loss=0.2605, pruned_loss=0.05121, over 4812.00 frames. ], tot_loss[loss=0.1899, simple_loss=0.2589, pruned_loss=0.06049, over 956265.55 frames. ], batch size: 38, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:53:24,351 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4332, 1.3360, 4.1403, 3.9038, 3.7097, 4.0058, 3.9400, 3.5837],
+       device='cuda:0'), covar=tensor([0.6955, 0.5669, 0.1066, 0.1656, 0.1015, 0.1597, 0.1379, 0.1493],
+       device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0303, 0.0401, 0.0406, 0.0347, 0.0406, 0.0313, 0.0367],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 02:53:56,229 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65332.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 02:54:07,780 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.597e+02 1.792e+02 2.067e+02 3.757e+02, threshold=3.584e+02, percent-clipped=0.0
+2023-04-27 02:54:16,339 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65346.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:54:20,388 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65352.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:54:22,655 INFO [finetune.py:976] (0/7) Epoch 12, batch 2350, loss[loss=0.163, simple_loss=0.2325, pruned_loss=0.04679, over 4813.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2572, pruned_loss=0.06068, over 957491.64 frames. ], batch size: 40, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:54:22,804 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5914, 0.7216, 1.4097, 1.9575, 1.6664, 1.5159, 1.5197, 1.5191],
+       device='cuda:0'), covar=tensor([0.4810, 0.6615, 0.6198, 0.6876, 0.6234, 0.7799, 0.7417, 0.7620],
+       device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0406, 0.0496, 0.0514, 0.0440, 0.0461, 0.0466, 0.0470],
+       device='cuda:0'), out_proj_covar=tensor([9.9455e-05, 1.0055e-04, 1.1193e-04, 1.2187e-04, 1.0652e-04, 1.1121e-04,
+       1.1169e-04, 1.1242e-04], device='cuda:0')
+2023-04-27 02:54:25,629 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65359.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:54:37,630 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0
+2023-04-27 02:54:50,629 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8559, 2.2645, 0.9833, 1.5771, 2.4183, 1.7804, 1.6487, 1.7778],
+       device='cuda:0'), covar=tensor([0.0488, 0.0334, 0.0316, 0.0555, 0.0219, 0.0523, 0.0494, 0.0544],
+       device='cuda:0'), in_proj_covar=tensor([0.0030, 0.0025, 0.0023, 0.0030, 0.0020, 0.0029, 0.0029, 0.0029],
+       device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0050, 0.0038, 0.0049, 0.0049, 0.0050],
+       device='cuda:0')
+2023-04-27 02:54:53,230 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65401.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:54:55,972 INFO [finetune.py:976] (0/7) Epoch 12, batch 2400, loss[loss=0.1803, simple_loss=0.2466, pruned_loss=0.05698, over 4859.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2536, pruned_loss=0.0595, over 958755.14 frames. ], batch size: 49, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:54:57,714 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65407.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:54:58,354 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65408.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:55:00,170 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65411.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:55:20,644 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.563e+02 1.962e+02 2.388e+02 3.739e+02, threshold=3.925e+02, percent-clipped=1.0
+2023-04-27 02:55:25,584 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65449.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:55:29,227 INFO [finetune.py:976] (0/7) Epoch 12, batch 2450, loss[loss=0.1732, simple_loss=0.2411, pruned_loss=0.05269, over 4899.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2513, pruned_loss=0.05913, over 959397.27 frames. ], batch size: 35, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:55:30,344 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65456.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:55:54,491 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4762, 1.2382, 1.6863, 1.6484, 1.3172, 1.2508, 1.3391, 0.8426],
+       device='cuda:0'), covar=tensor([0.0603, 0.0893, 0.0470, 0.0704, 0.0839, 0.1256, 0.0693, 0.0818],
+       device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0071, 0.0070, 0.0067, 0.0075, 0.0096, 0.0076, 0.0071],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-27 02:56:02,902 INFO [finetune.py:976] (0/7) Epoch 12, batch 2500, loss[loss=0.2132, simple_loss=0.2878, pruned_loss=0.06935, over 4907.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2541, pruned_loss=0.06059, over 958242.62 frames. ], batch size: 37, lr: 3.65e-03, grad_scale: 32.0
+2023-04-27 02:56:11,866 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8833, 1.5366, 1.9271, 2.0813, 1.6135, 1.5112, 1.5980, 0.9933],
+       device='cuda:0'), covar=tensor([0.0532, 0.0918, 0.0548, 0.0596, 0.0732, 0.1265, 0.0621, 0.0837],
+       device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0071, 0.0071, 0.0067, 0.0075, 0.0097, 0.0076, 0.0071],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-27 02:56:22,212 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65532.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 02:56:28,027 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.207e+02 1.847e+02 2.256e+02 2.656e+02 4.911e+02, threshold=4.512e+02, percent-clipped=5.0
+2023-04-27 02:56:31,828 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0904, 1.4523, 1.3634, 1.6566, 1.5694, 1.8730, 1.3138, 3.3634],
+       device='cuda:0'), covar=tensor([0.0697, 0.0877, 0.0853, 0.1301, 0.0683, 0.0480, 0.0819, 0.0169],
+       device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0057],
+       device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016],
+       device='cuda:0')
+2023-04-27 02:56:36,598 INFO [finetune.py:976] (0/7) Epoch 12, batch 2550, loss[loss=0.2004, simple_loss=0.2714, pruned_loss=0.06468, over 4757.00 frames. ], tot_loss[loss=0.1894, simple_loss=0.2568, pruned_loss=0.06099, over 957552.09 frames. ], batch size: 54, lr: 3.65e-03, grad_scale: 16.0
+2023-04-27 02:56:36,781 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0
+2023-04-27 02:56:44,321 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5575, 1.2134, 3.9671, 3.7443, 3.5256, 3.7243, 3.6596, 3.5029],
+       device='cuda:0'), covar=tensor([0.6466, 0.5583, 0.0926, 0.1479, 0.1000, 0.1630, 0.2382, 0.1357],
+       device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0302, 0.0399, 0.0402, 0.0345, 0.0403, 0.0313, 0.0366],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 02:57:02,298 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65593.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 02:57:02,450 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0
+2023-04-27 02:57:04,642 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0
+2023-04-27 02:57:10,044 INFO [finetune.py:976] (0/7) Epoch 12, batch 2600, loss[loss=0.2111, simple_loss=0.2835, pruned_loss=0.0694, over 4923.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.2575, pruned_loss=0.06133, over 957233.60 frames. ], batch size: 38, lr: 3.65e-03, grad_scale: 16.0
+2023-04-27 02:57:19,579 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0211, 1.6775, 1.5015, 1.7213, 2.1910, 1.8086, 1.5024, 1.4200],
+       device='cuda:0'), covar=tensor([0.1355, 0.1292, 0.1961, 0.1134, 0.0784, 0.1509, 0.1987, 0.1905],
+       device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0320, 0.0356, 0.0299, 0.0335, 0.0320, 0.0309, 0.0364],
+       device='cuda:0'), out_proj_covar=tensor([6.4614e-05, 6.7368e-05, 7.6585e-05, 6.1477e-05, 6.9945e-05, 6.8171e-05,
+       6.5862e-05, 7.7981e-05], device='cuda:0')
+2023-04-27 02:57:29,323 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65632.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:57:34,706 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65641.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:57:35,230 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.708e+02 2.004e+02 2.304e+02 5.079e+02, threshold=4.008e+02, percent-clipped=1.0
+2023-04-27 02:57:54,580 INFO [finetune.py:976] (0/7) Epoch 12, batch 2650, loss[loss=0.1616, simple_loss=0.2436, pruned_loss=0.03978, over 4690.00 frames. ], tot_loss[loss=0.1909, simple_loss=0.2584, pruned_loss=0.06166, over 955763.49 frames. ], batch size: 59, lr: 3.65e-03, grad_scale: 16.0
+2023-04-27 02:58:26,141 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65680.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:58:54,680 INFO [finetune.py:976] (0/7) Epoch 12, batch 2700, loss[loss=0.2077, simple_loss=0.2765, pruned_loss=0.06949, over 4898.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2581, pruned_loss=0.06122, over 957014.48 frames. ], batch size: 32, lr: 3.65e-03, grad_scale: 16.0
+2023-04-27 02:59:03,988 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65711.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 02:59:41,215 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0
+2023-04-27 02:59:42,190 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.570e+02 1.817e+02 2.252e+02 5.618e+02, threshold=3.635e+02, percent-clipped=2.0
+2023-04-27 03:00:01,384 INFO [finetune.py:976] (0/7) Epoch 12, batch 2750, loss[loss=0.1509, simple_loss=0.2194, pruned_loss=0.04117, over 4824.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2551, pruned_loss=0.06044, over 956842.75 frames. ], batch size: 30, lr: 3.65e-03, grad_scale: 16.0
+2023-04-27 03:00:03,880 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65759.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:01:07,125 INFO [finetune.py:976] (0/7) Epoch 12, batch 2800, loss[loss=0.2034, simple_loss=0.2521, pruned_loss=0.07739, over 4794.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2522, pruned_loss=0.05927, over 957617.72 frames. ], batch size: 51, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:01:51,945 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=65838.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:01:54,199 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7564, 2.0591, 0.9218, 1.1755, 1.6225, 1.0844, 2.2736, 1.3415],
+       device='cuda:0'), covar=tensor([0.0610, 0.0689, 0.0626, 0.1013, 0.0369, 0.0833, 0.0273, 0.0576],
+       device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0077, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+       device='cuda:0')
+2023-04-27 03:01:59,670 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.664e+02 1.878e+02 2.585e+02 4.047e+02, threshold=3.756e+02, percent-clipped=2.0
+2023-04-27 03:02:08,131 INFO [finetune.py:976] (0/7) Epoch 12, batch 2850, loss[loss=0.1881, simple_loss=0.271, pruned_loss=0.05262, over 4855.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2507, pruned_loss=0.05848, over 957251.71 frames. ], batch size: 44, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:02:29,537 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=65888.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:02:38,191 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=65899.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:02:41,704 INFO [finetune.py:976] (0/7) Epoch 12, batch 2900, loss[loss=0.2103, simple_loss=0.2846, pruned_loss=0.06796, over 4801.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2539, pruned_loss=0.06016, over 955853.22 frames. ], batch size: 45, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:03:05,042 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=65941.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:03:05,560 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.180e+02 1.732e+02 2.023e+02 2.445e+02 4.251e+02, threshold=4.047e+02, percent-clipped=2.0
+2023-04-27 03:03:15,318 INFO [finetune.py:976] (0/7) Epoch 12, batch 2950, loss[loss=0.1674, simple_loss=0.245, pruned_loss=0.04488, over 4913.00 frames. ], tot_loss[loss=0.1878, simple_loss=0.2554, pruned_loss=0.06007, over 956984.98 frames. ], batch size: 36, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:03:19,107 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9521, 2.2721, 2.1881, 2.3621, 2.0845, 2.1888, 2.2798, 2.2117],
+       device='cuda:0'), covar=tensor([0.5315, 0.7399, 0.5736, 0.5757, 0.6851, 0.8542, 0.7033, 0.6276],
+       device='cuda:0'), in_proj_covar=tensor([0.0326, 0.0375, 0.0314, 0.0324, 0.0335, 0.0395, 0.0354, 0.0322],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-04-27 03:03:36,943 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=65989.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:03:42,895 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.75 vs. limit=5.0
+2023-04-27 03:03:45,046 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-66000.pt
+2023-04-27 03:03:49,752 INFO [finetune.py:976] (0/7) Epoch 12, batch 3000, loss[loss=0.2132, simple_loss=0.2773, pruned_loss=0.07458, over 4831.00 frames. ], tot_loss[loss=0.1893, simple_loss=0.2567, pruned_loss=0.06094, over 955321.06 frames. ], batch size: 47, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:03:49,753 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 03:03:51,788 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2007, 1.4551, 1.6707, 1.7925, 1.7299, 1.8649, 1.6954, 1.7584],
+       device='cuda:0'), covar=tensor([0.4367, 0.6312, 0.5964, 0.5756, 0.6595, 0.8480, 0.6865, 0.5829],
+       device='cuda:0'), in_proj_covar=tensor([0.0328, 0.0376, 0.0316, 0.0326, 0.0337, 0.0397, 0.0357, 0.0324],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-04-27 03:03:57,131 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1477, 2.5100, 1.0133, 1.4079, 1.9082, 1.3285, 3.0395, 1.6695],
+       device='cuda:0'), covar=tensor([0.0619, 0.0557, 0.0705, 0.1270, 0.0433, 0.0885, 0.0268, 0.0588],
+       device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0077, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+       device='cuda:0')
+2023-04-27 03:04:00,411 INFO [finetune.py:1010] (0/7) Epoch 12, validation: loss=0.1529, simple_loss=0.2247, pruned_loss=0.04052, over 2265189.00 frames.
+2023-04-27 03:04:00,411 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 03:04:13,920 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9751, 2.6174, 0.8929, 1.3083, 1.8896, 1.2456, 3.5576, 1.6053],
+       device='cuda:0'), covar=tensor([0.0877, 0.0777, 0.0979, 0.1772, 0.0681, 0.1305, 0.0367, 0.0881],
+       device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0077, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+       device='cuda:0')
+2023-04-27 03:04:29,440 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.787e+02 2.243e+02 2.841e+02 1.392e+03, threshold=4.486e+02, percent-clipped=4.0
+2023-04-27 03:04:38,275 INFO [finetune.py:976] (0/7) Epoch 12, batch 3050, loss[loss=0.1839, simple_loss=0.2514, pruned_loss=0.05825, over 4852.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.2581, pruned_loss=0.06117, over 957342.06 frames. ], batch size: 31, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:05:08,266 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2553, 1.6267, 1.5974, 1.9306, 1.7039, 2.0526, 1.4622, 3.6602],
+       device='cuda:0'), covar=tensor([0.0667, 0.0842, 0.0774, 0.1198, 0.0645, 0.0462, 0.0753, 0.0144],
+       device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0039, 0.0039, 0.0058],
+       device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+       device='cuda:0')
+2023-04-27 03:05:10,512 INFO [finetune.py:976] (0/7) Epoch 12, batch 3100, loss[loss=0.1459, simple_loss=0.2127, pruned_loss=0.0395, over 4820.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2557, pruned_loss=0.06042, over 956455.05 frames. ], batch size: 25, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:05:19,416 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.2639, 2.9006, 3.2902, 3.6915, 3.3806, 3.1783, 2.6355, 3.4145],
+       device='cuda:0'), covar=tensor([0.0765, 0.0878, 0.0504, 0.0465, 0.0556, 0.0716, 0.0733, 0.0480],
+       device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0205, 0.0186, 0.0177, 0.0181, 0.0192, 0.0160, 0.0185],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 03:05:39,731 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0392, 1.5441, 1.6637, 1.6827, 2.2438, 1.8501, 1.5084, 1.4938],
+       device='cuda:0'), covar=tensor([0.1346, 0.1517, 0.1734, 0.1292, 0.0672, 0.1430, 0.2003, 0.1983],
+       device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0321, 0.0353, 0.0297, 0.0334, 0.0318, 0.0307, 0.0360],
+       device='cuda:0'), out_proj_covar=tensor([6.4557e-05, 6.7486e-05, 7.5825e-05, 6.0900e-05, 6.9756e-05, 6.7663e-05,
+       6.5350e-05, 7.7146e-05], device='cuda:0')
+2023-04-27 03:05:40,809 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.130e+02 1.687e+02 1.900e+02 2.210e+02 4.094e+02, threshold=3.799e+02, percent-clipped=0.0
+2023-04-27 03:05:41,561 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3583, 3.2532, 2.6644, 2.7926, 2.3721, 2.6879, 2.7336, 2.0893],
+       device='cuda:0'), covar=tensor([0.2502, 0.1343, 0.0846, 0.1389, 0.2893, 0.1264, 0.1809, 0.2740],
+       device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0309, 0.0222, 0.0281, 0.0310, 0.0261, 0.0250, 0.0271],
+       device='cuda:0'), out_proj_covar=tensor([1.1668e-04, 1.2366e-04, 8.8855e-05, 1.1227e-04, 1.2669e-04, 1.0473e-04,
+       1.0155e-04, 1.0839e-04], device='cuda:0')
+2023-04-27 03:05:42,177 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66144.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:05:54,637 INFO [finetune.py:976] (0/7) Epoch 12, batch 3150, loss[loss=0.225, simple_loss=0.2728, pruned_loss=0.08858, over 4938.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2521, pruned_loss=0.05882, over 955739.42 frames. ], batch size: 33, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:05:58,696 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66160.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:06:08,720 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4416, 1.9521, 2.4280, 2.8838, 2.3900, 1.8789, 1.6589, 2.1149],
+       device='cuda:0'), covar=tensor([0.3444, 0.3348, 0.1576, 0.2252, 0.2721, 0.2772, 0.4172, 0.2264],
+       device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0248, 0.0221, 0.0315, 0.0214, 0.0228, 0.0229, 0.0185],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:0')
+2023-04-27 03:06:27,523 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66188.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:06:36,577 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66194.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 03:06:38,566 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0
+2023-04-27 03:06:49,145 INFO [finetune.py:976] (0/7) Epoch 12, batch 3200, loss[loss=0.1967, simple_loss=0.2557, pruned_loss=0.06886, over 4858.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2489, pruned_loss=0.05745, over 955345.76 frames. ], batch size: 44, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:06:49,261 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66205.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:07:12,337 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66221.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 03:07:34,229 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=66236.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:07:35,064 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-27 03:07:43,560 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.908e+01 1.563e+02 1.889e+02 2.258e+02 3.990e+02, threshold=3.778e+02, percent-clipped=1.0
+2023-04-27 03:07:57,820 INFO [finetune.py:976] (0/7) Epoch 12, batch 3250, loss[loss=0.1933, simple_loss=0.2737, pruned_loss=0.0565, over 4838.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2492, pruned_loss=0.0576, over 953696.35 frames. ], batch size: 47, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:07:59,770 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66258.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:08:47,969 INFO [finetune.py:976] (0/7) Epoch 12, batch 3300, loss[loss=0.2213, simple_loss=0.2968, pruned_loss=0.07283, over 4800.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2539, pruned_loss=0.05911, over 954654.90 frames. ], batch size: 41, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:08:51,890 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0
+2023-04-27 03:08:55,726 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-04-27 03:08:57,556 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66319.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:09:01,702 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0
+2023-04-27 03:09:13,362 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.300e+02 1.708e+02 2.133e+02 2.444e+02 6.112e+02, threshold=4.266e+02, percent-clipped=2.0
+2023-04-27 03:09:21,693 INFO [finetune.py:976] (0/7) Epoch 12, batch 3350, loss[loss=0.2595, simple_loss=0.3239, pruned_loss=0.0976, over 4900.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2566, pruned_loss=0.0601, over 953211.58 frames. ], batch size: 43, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:09:42,922 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7415, 2.4636, 1.8698, 1.7917, 1.2276, 1.2663, 1.9149, 1.2149],
+       device='cuda:0'), covar=tensor([0.1790, 0.1391, 0.1572, 0.1903, 0.2515, 0.2078, 0.1031, 0.2177],
+       device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0214, 0.0170, 0.0204, 0.0203, 0.0184, 0.0158, 0.0185],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+       device='cuda:0')
+2023-04-27 03:09:54,777 INFO [finetune.py:976] (0/7) Epoch 12, batch 3400, loss[loss=0.1943, simple_loss=0.2733, pruned_loss=0.05768, over 4789.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2581, pruned_loss=0.0607, over 952897.71 frames. ], batch size: 25, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:09:54,893 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4167, 3.3451, 2.7818, 2.8796, 2.5251, 2.7909, 2.7979, 2.2724],
+       device='cuda:0'), covar=tensor([0.2281, 0.1128, 0.0794, 0.1374, 0.2603, 0.1099, 0.1850, 0.2465],
+       device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0309, 0.0221, 0.0281, 0.0310, 0.0261, 0.0250, 0.0270],
+       device='cuda:0'), out_proj_covar=tensor([1.1650e-04, 1.2360e-04, 8.8540e-05, 1.1215e-04, 1.2642e-04, 1.0458e-04,
+       1.0168e-04, 1.0792e-04], device='cuda:0')
+2023-04-27 03:10:18,498 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6038, 1.9747, 1.5391, 1.4132, 1.1497, 1.1708, 1.5714, 1.0929],
+       device='cuda:0'), covar=tensor([0.1650, 0.1328, 0.1554, 0.1827, 0.2502, 0.1991, 0.1045, 0.2162],
+       device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0215, 0.0171, 0.0204, 0.0204, 0.0185, 0.0158, 0.0186],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+       device='cuda:0')
+2023-04-27 03:10:20,215 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.592e+02 1.894e+02 2.195e+02 3.276e+02, threshold=3.787e+02, percent-clipped=0.0
+2023-04-27 03:10:28,164 INFO [finetune.py:976] (0/7) Epoch 12, batch 3450, loss[loss=0.1685, simple_loss=0.2387, pruned_loss=0.04911, over 4768.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.256, pruned_loss=0.05969, over 951842.34 frames. ], batch size: 25, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:10:32,555 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-04-27 03:10:55,270 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66494.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:10:58,885 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66500.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 03:11:01,855 INFO [finetune.py:976] (0/7) Epoch 12, batch 3500, loss[loss=0.2192, simple_loss=0.2701, pruned_loss=0.08416, over 4727.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2538, pruned_loss=0.05945, over 950491.55 frames. ], batch size: 59, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:11:09,115 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66516.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 03:11:12,191 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66521.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:11:16,879 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6229, 1.0330, 1.6684, 2.0570, 1.7080, 1.6365, 1.6680, 1.6744],
+       device='cuda:0'), covar=tensor([0.5073, 0.6902, 0.6883, 0.7371, 0.6555, 0.8303, 0.8103, 0.7649],
+       device='cuda:0'), in_proj_covar=tensor([0.0411, 0.0406, 0.0497, 0.0515, 0.0439, 0.0462, 0.0467, 0.0470],
+       device='cuda:0'), out_proj_covar=tensor([9.9885e-05, 1.0083e-04, 1.1205e-04, 1.2214e-04, 1.0635e-04, 1.1141e-04,
+       1.1177e-04, 1.1242e-04], device='cuda:0')
+2023-04-27 03:11:26,633 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.637e+01 1.554e+02 1.930e+02 2.326e+02 4.798e+02, threshold=3.860e+02, percent-clipped=2.0
+2023-04-27 03:11:26,701 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=66542.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:11:40,350 INFO [finetune.py:976] (0/7) Epoch 12, batch 3550, loss[loss=0.181, simple_loss=0.2491, pruned_loss=0.05647, over 4833.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2527, pruned_loss=0.05959, over 952906.75 frames. ], batch size: 40, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:11:52,780 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1977, 3.0141, 0.8701, 1.4004, 1.9937, 1.4073, 4.1226, 1.8251],
+       device='cuda:0'), covar=tensor([0.0843, 0.0777, 0.1009, 0.1693, 0.0730, 0.1313, 0.0293, 0.0909],
+       device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0066, 0.0049, 0.0047, 0.0051, 0.0052, 0.0077, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+       device='cuda:0')
+2023-04-27 03:12:00,405 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0848, 2.7255, 0.9920, 1.4398, 1.9541, 1.2339, 3.3944, 1.8892],
+       device='cuda:0'), covar=tensor([0.0683, 0.0684, 0.0847, 0.1193, 0.0513, 0.0958, 0.0232, 0.0545],
+       device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0066, 0.0048, 0.0047, 0.0051, 0.0052, 0.0077, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+       device='cuda:0')
+2023-04-27 03:12:12,790 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66582.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:12:45,655 INFO [finetune.py:976] (0/7) Epoch 12, batch 3600, loss[loss=0.1627, simple_loss=0.2323, pruned_loss=0.04651, over 4822.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2496, pruned_loss=0.05809, over 954791.42 frames. ], batch size: 25, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:12:57,587 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66614.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:13:31,415 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.620e+02 1.935e+02 2.301e+02 4.046e+02, threshold=3.870e+02, percent-clipped=1.0
+2023-04-27 03:13:40,254 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0570, 0.7343, 0.9205, 0.7835, 1.2158, 0.9613, 0.8761, 0.9096],
+       device='cuda:0'), covar=tensor([0.2111, 0.1756, 0.2304, 0.1754, 0.1128, 0.1568, 0.2076, 0.2496],
+       device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0320, 0.0354, 0.0299, 0.0336, 0.0318, 0.0307, 0.0361],
+       device='cuda:0'), out_proj_covar=tensor([6.4536e-05, 6.7353e-05, 7.6140e-05, 6.1390e-05, 7.0169e-05, 6.7637e-05,
+       6.5369e-05, 7.7345e-05], device='cuda:0')
+2023-04-27 03:13:45,504 INFO [finetune.py:976] (0/7) Epoch 12, batch 3650, loss[loss=0.2613, simple_loss=0.3263, pruned_loss=0.09811, over 4812.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2525, pruned_loss=0.05958, over 953494.25 frames. ], batch size: 51, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:14:13,024 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8013, 3.8288, 2.6955, 4.4217, 3.8469, 3.8406, 1.8118, 3.7426],
+       device='cuda:0'), covar=tensor([0.1587, 0.1104, 0.3134, 0.1567, 0.3197, 0.1864, 0.5187, 0.2308],
+       device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0217, 0.0250, 0.0303, 0.0297, 0.0247, 0.0270, 0.0270],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+       device='cuda:0')
+2023-04-27 03:14:13,167 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0
+2023-04-27 03:14:14,412 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0
+2023-04-27 03:14:19,347 INFO [finetune.py:976] (0/7) Epoch 12, batch 3700, loss[loss=0.1753, simple_loss=0.2563, pruned_loss=0.0472, over 4852.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2556, pruned_loss=0.05963, over 955034.54 frames. ], batch size: 44, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:14:35,822 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66731.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:14:43,342 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.132e+02 1.645e+02 1.962e+02 2.423e+02 5.947e+02, threshold=3.923e+02, percent-clipped=4.0
+2023-04-27 03:14:52,770 INFO [finetune.py:976] (0/7) Epoch 12, batch 3750, loss[loss=0.1857, simple_loss=0.269, pruned_loss=0.05122, over 4858.00 frames. ], tot_loss[loss=0.1898, simple_loss=0.2582, pruned_loss=0.0607, over 955844.13 frames. ], batch size: 31, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:15:16,250 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=66792.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 03:15:16,297 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-27 03:15:19,331 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1394, 2.0391, 1.7404, 1.8002, 2.3182, 1.8790, 2.7622, 1.6468],
+       device='cuda:0'), covar=tensor([0.4351, 0.2129, 0.5337, 0.3537, 0.1786, 0.2764, 0.1630, 0.4775],
+       device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0342, 0.0425, 0.0354, 0.0377, 0.0377, 0.0374, 0.0414],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 03:15:21,041 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-04-27 03:15:21,677 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66800.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:15:25,457 INFO [finetune.py:976] (0/7) Epoch 12, batch 3800, loss[loss=0.2191, simple_loss=0.2731, pruned_loss=0.08249, over 4782.00 frames. ], tot_loss[loss=0.1905, simple_loss=0.2591, pruned_loss=0.06098, over 954223.01 frames. ], batch size: 51, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:15:27,426 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0733, 0.8018, 0.9347, 0.8180, 1.2182, 0.9423, 0.8390, 0.9653],
+       device='cuda:0'), covar=tensor([0.1740, 0.1560, 0.2169, 0.1665, 0.1028, 0.1524, 0.2084, 0.2212],
+       device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0320, 0.0353, 0.0297, 0.0334, 0.0316, 0.0306, 0.0361],
+       device='cuda:0'), out_proj_covar=tensor([6.4241e-05, 6.7236e-05, 7.5762e-05, 6.1150e-05, 6.9802e-05, 6.7138e-05,
+       6.5231e-05, 7.7333e-05], device='cuda:0')
+2023-04-27 03:15:29,218 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9137, 2.0142, 2.0384, 2.7060, 2.7726, 2.4850, 2.3259, 2.0456],
+       device='cuda:0'), covar=tensor([0.1457, 0.1754, 0.1750, 0.1403, 0.1070, 0.1608, 0.3096, 0.1930],
+       device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0319, 0.0353, 0.0297, 0.0334, 0.0316, 0.0306, 0.0361],
+       device='cuda:0'), out_proj_covar=tensor([6.4220e-05, 6.7221e-05, 7.5756e-05, 6.1127e-05, 6.9772e-05, 6.7117e-05,
+       6.5209e-05, 7.7323e-05], device='cuda:0')
+2023-04-27 03:15:32,267 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66816.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 03:15:48,321 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.670e+02 1.892e+02 2.401e+02 5.287e+02, threshold=3.784e+02, percent-clipped=2.0
+2023-04-27 03:15:52,964 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=66848.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:15:58,167 INFO [finetune.py:976] (0/7) Epoch 12, batch 3850, loss[loss=0.1566, simple_loss=0.2268, pruned_loss=0.04317, over 4760.00 frames. ], tot_loss[loss=0.1881, simple_loss=0.2567, pruned_loss=0.05981, over 953125.05 frames. ], batch size: 26, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:16:04,609 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=66864.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:16:05,838 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1821, 2.6360, 2.2542, 2.3805, 1.8710, 2.1032, 2.2146, 1.6775],
+       device='cuda:0'), covar=tensor([0.2010, 0.1090, 0.0825, 0.1291, 0.3061, 0.1330, 0.1961, 0.2807],
+       device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0313, 0.0224, 0.0283, 0.0313, 0.0263, 0.0253, 0.0271],
+       device='cuda:0'), out_proj_covar=tensor([1.1785e-04, 1.2497e-04, 8.9407e-05, 1.1316e-04, 1.2762e-04, 1.0530e-04,
+       1.0279e-04, 1.0827e-04], device='cuda:0')
+2023-04-27 03:16:12,476 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=66877.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:16:16,730 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.35 vs. limit=5.0
+2023-04-27 03:16:31,349 INFO [finetune.py:976] (0/7) Epoch 12, batch 3900, loss[loss=0.1814, simple_loss=0.2443, pruned_loss=0.05919, over 4825.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2531, pruned_loss=0.0589, over 955629.88 frames. ], batch size: 33, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:16:38,416 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=66914.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:17:12,162 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.631e+02 1.862e+02 2.227e+02 3.932e+02, threshold=3.724e+02, percent-clipped=1.0
+2023-04-27 03:17:15,390 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4014, 1.2347, 1.6116, 1.5099, 1.2679, 1.1339, 1.2731, 0.7331],
+       device='cuda:0'), covar=tensor([0.0522, 0.0605, 0.0437, 0.0573, 0.0751, 0.1181, 0.0551, 0.0711],
+       device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0071, 0.0070, 0.0066, 0.0075, 0.0096, 0.0075, 0.0070],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-27 03:17:32,061 INFO [finetune.py:976] (0/7) Epoch 12, batch 3950, loss[loss=0.1497, simple_loss=0.2189, pruned_loss=0.04029, over 4802.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2504, pruned_loss=0.05732, over 956597.02 frames. ], batch size: 29, lr: 3.64e-03, grad_scale: 16.0
+2023-04-27 03:17:33,410 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=66957.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:17:42,915 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=66962.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:17:54,820 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7239, 1.2199, 1.3978, 1.3969, 1.9149, 1.5416, 1.2296, 1.3339],
+       device='cuda:0'), covar=tensor([0.1658, 0.1621, 0.2155, 0.1666, 0.0916, 0.1607, 0.2232, 0.2541],
+       device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0318, 0.0350, 0.0296, 0.0331, 0.0314, 0.0304, 0.0359],
+       device='cuda:0'), out_proj_covar=tensor([6.3767e-05, 6.6860e-05, 7.5244e-05, 6.0861e-05, 6.8985e-05, 6.6822e-05,
+       6.4678e-05, 7.6831e-05], device='cuda:0')
+2023-04-27 03:18:31,902 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0
+2023-04-27 03:18:39,567 INFO [finetune.py:976] (0/7) Epoch 12, batch 4000, loss[loss=0.1307, simple_loss=0.2049, pruned_loss=0.02826, over 4824.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2505, pruned_loss=0.05774, over 956400.53 frames. ], batch size: 25, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:18:58,952 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67018.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:19:05,172 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.70 vs. limit=2.0
+2023-04-27 03:19:14,014 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.700e+02 2.045e+02 2.371e+02 4.368e+02, threshold=4.090e+02, percent-clipped=3.0
+2023-04-27 03:19:22,919 INFO [finetune.py:976] (0/7) Epoch 12, batch 4050, loss[loss=0.2111, simple_loss=0.2796, pruned_loss=0.07127, over 4861.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.254, pruned_loss=0.05975, over 956319.55 frames. ], batch size: 34, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:19:42,043 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67083.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:19:44,500 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67087.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:19:45,900 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.37 vs. limit=5.0
+2023-04-27 03:19:48,828 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-27 03:19:56,226 INFO [finetune.py:976] (0/7) Epoch 12, batch 4100, loss[loss=0.1785, simple_loss=0.2493, pruned_loss=0.05381, over 4761.00 frames. ], tot_loss[loss=0.1888, simple_loss=0.2566, pruned_loss=0.06053, over 952429.68 frames. ], batch size: 28, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:20:17,008 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4840, 3.3687, 0.8404, 1.9458, 1.8840, 2.4287, 1.9117, 0.9948],
+       device='cuda:0'), covar=tensor([0.1434, 0.1126, 0.2072, 0.1216, 0.1058, 0.0998, 0.1480, 0.2192],
+       device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0250, 0.0140, 0.0122, 0.0134, 0.0154, 0.0118, 0.0121],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-27 03:20:21,138 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.668e+02 1.950e+02 2.451e+02 4.466e+02, threshold=3.899e+02, percent-clipped=3.0
+2023-04-27 03:20:22,500 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67144.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:20:29,534 INFO [finetune.py:976] (0/7) Epoch 12, batch 4150, loss[loss=0.2022, simple_loss=0.2577, pruned_loss=0.07332, over 4732.00 frames. ], tot_loss[loss=0.1916, simple_loss=0.2596, pruned_loss=0.06178, over 952816.84 frames. ], batch size: 23, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:20:45,401 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67177.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:21:03,561 INFO [finetune.py:976] (0/7) Epoch 12, batch 4200, loss[loss=0.1735, simple_loss=0.243, pruned_loss=0.05195, over 4827.00 frames. ], tot_loss[loss=0.1902, simple_loss=0.259, pruned_loss=0.06074, over 953761.32 frames. ], batch size: 30, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:21:18,098 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=67225.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:21:28,834 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.623e+02 1.873e+02 2.449e+02 3.643e+02, threshold=3.747e+02, percent-clipped=0.0
+2023-04-27 03:21:33,788 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2174, 1.7015, 2.2035, 2.6778, 2.1297, 1.6926, 1.4117, 2.0075],
+       device='cuda:0'), covar=tensor([0.3946, 0.3872, 0.1822, 0.2765, 0.3025, 0.3019, 0.4856, 0.2446],
+       device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0250, 0.0224, 0.0319, 0.0216, 0.0230, 0.0232, 0.0186],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:0')
+2023-04-27 03:21:36,080 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.13 vs. limit=5.0
+2023-04-27 03:21:37,112 INFO [finetune.py:976] (0/7) Epoch 12, batch 4250, loss[loss=0.1618, simple_loss=0.2224, pruned_loss=0.0506, over 4147.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2561, pruned_loss=0.05956, over 954473.83 frames. ], batch size: 18, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:22:10,184 INFO [finetune.py:976] (0/7) Epoch 12, batch 4300, loss[loss=0.1903, simple_loss=0.2674, pruned_loss=0.05654, over 4917.00 frames. ], tot_loss[loss=0.1852, simple_loss=0.2532, pruned_loss=0.05866, over 953423.75 frames. ], batch size: 37, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:22:15,623 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67313.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 03:22:25,831 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7551, 1.4291, 1.8795, 2.3025, 1.9396, 1.7736, 1.7991, 1.7611],
+       device='cuda:0'), covar=tensor([0.5313, 0.7575, 0.7957, 0.6873, 0.6466, 0.8736, 0.9118, 1.0025],
+       device='cuda:0'), in_proj_covar=tensor([0.0411, 0.0406, 0.0497, 0.0515, 0.0441, 0.0461, 0.0467, 0.0470],
+       device='cuda:0'), out_proj_covar=tensor([9.9706e-05, 1.0074e-04, 1.1188e-04, 1.2219e-04, 1.0674e-04, 1.1117e-04,
+       1.1195e-04, 1.1253e-04], device='cuda:0')
+2023-04-27 03:22:26,421 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5470, 1.3771, 1.7547, 1.7850, 1.3405, 1.2447, 1.4914, 0.9770],
+       device='cuda:0'), covar=tensor([0.0639, 0.0669, 0.0472, 0.0657, 0.0782, 0.1108, 0.0708, 0.0684],
+       device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0071, 0.0070, 0.0066, 0.0075, 0.0096, 0.0075, 0.0070],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-27 03:22:36,119 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.106e+02 1.576e+02 1.859e+02 2.202e+02 4.297e+02, threshold=3.717e+02, percent-clipped=1.0
+2023-04-27 03:22:38,064 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1684, 1.6834, 2.1069, 2.5317, 2.0508, 1.6170, 1.3773, 1.9433],
+       device='cuda:0'), covar=tensor([0.3264, 0.3374, 0.1646, 0.2336, 0.2753, 0.2850, 0.4485, 0.2289],
+       device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0250, 0.0223, 0.0318, 0.0215, 0.0230, 0.0232, 0.0186],
+       device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+       device='cuda:0')
+2023-04-27 03:22:43,955 INFO [finetune.py:976] (0/7) Epoch 12, batch 4350, loss[loss=0.1758, simple_loss=0.2415, pruned_loss=0.05505, over 4835.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2497, pruned_loss=0.05771, over 950235.75 frames. ], batch size: 30, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:22:45,893 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0646, 2.5144, 1.0121, 1.3105, 1.9885, 1.1867, 3.3818, 1.6756],
+       device='cuda:0'), covar=tensor([0.0705, 0.0692, 0.0838, 0.1256, 0.0498, 0.0998, 0.0240, 0.0639],
+       device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0051, 0.0052, 0.0077, 0.0051],
+       device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+       device='cuda:0')
+2023-04-27 03:22:55,442 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-27 03:23:19,518 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7465, 1.5380, 1.9119, 2.0187, 1.4804, 1.3846, 1.6316, 1.0515],
+       device='cuda:0'), covar=tensor([0.0549, 0.0977, 0.0490, 0.0664, 0.0904, 0.1243, 0.0748, 0.0733],
+       device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0071, 0.0070, 0.0067, 0.0075, 0.0096, 0.0075, 0.0070],
+       device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+       device='cuda:0')
+2023-04-27 03:23:28,536 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67387.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:23:51,835 INFO [finetune.py:976] (0/7) Epoch 12, batch 4400, loss[loss=0.2403, simple_loss=0.3032, pruned_loss=0.08873, over 4893.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.252, pruned_loss=0.05906, over 951846.90 frames. ], batch size: 32, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:23:51,922 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8069, 1.2410, 4.7615, 4.4476, 4.1894, 4.5177, 4.2585, 4.2427],
+       device='cuda:0'), covar=tensor([0.7617, 0.6273, 0.0979, 0.1931, 0.1198, 0.1400, 0.1983, 0.1621],
+       device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0308, 0.0405, 0.0412, 0.0353, 0.0412, 0.0316, 0.0372],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 03:24:22,567 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2489, 1.8165, 2.2405, 2.5853, 2.5036, 2.0315, 1.7265, 2.1697],
+       device='cuda:0'), covar=tensor([0.0856, 0.1179, 0.0698, 0.0592, 0.0622, 0.1017, 0.0870, 0.0654],
+       device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0202, 0.0183, 0.0174, 0.0179, 0.0186, 0.0156, 0.0181],
+       device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+       device='cuda:0')
+2023-04-27 03:24:26,226 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=67435.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:24:34,557 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67439.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:24:36,308 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.200e+02 1.735e+02 1.991e+02 2.507e+02 6.130e+02, threshold=3.981e+02, percent-clipped=5.0
+2023-04-27 03:24:56,594 INFO [finetune.py:976] (0/7) Epoch 12, batch 4450, loss[loss=0.1887, simple_loss=0.2604, pruned_loss=0.05851, over 4706.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2548, pruned_loss=0.05998, over 948916.24 frames. ], batch size: 23, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:25:48,054 INFO [finetune.py:976] (0/7) Epoch 12, batch 4500, loss[loss=0.2323, simple_loss=0.2873, pruned_loss=0.08864, over 4900.00 frames. ], tot_loss[loss=0.1901, simple_loss=0.258, pruned_loss=0.06114, over 951672.53 frames. ], batch size: 32, lr: 3.63e-03, grad_scale: 16.0
+2023-04-27 03:25:53,039 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67513.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:26:12,842 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.294e+02 1.699e+02 1.983e+02 2.527e+02 4.329e+02, threshold=3.965e+02, percent-clipped=1.0
+2023-04-27 03:26:22,248 INFO [finetune.py:976] (0/7) Epoch 12, batch 4550, loss[loss=0.2067, simple_loss=0.2679, pruned_loss=0.0727, over 4902.00 frames.
], tot_loss[loss=0.1912, simple_loss=0.2592, pruned_loss=0.06159, over 952858.15 frames. ], batch size: 36, lr: 3.63e-03, grad_scale: 32.0 +2023-04-27 03:26:23,556 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6667, 4.3523, 1.0957, 2.1469, 2.3231, 2.7290, 2.3888, 0.8814], + device='cuda:0'), covar=tensor([0.1861, 0.1424, 0.2253, 0.1616, 0.1272, 0.1293, 0.1713, 0.2500], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0249, 0.0139, 0.0122, 0.0134, 0.0153, 0.0118, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 03:26:34,564 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67574.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 03:26:40,062 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1378, 1.6223, 2.0442, 2.4283, 2.0323, 1.5449, 1.3427, 1.8865], + device='cuda:0'), covar=tensor([0.3494, 0.3667, 0.1823, 0.2638, 0.2794, 0.2782, 0.4689, 0.2330], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0250, 0.0224, 0.0318, 0.0216, 0.0230, 0.0232, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 03:26:45,249 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8675, 1.4121, 1.6930, 1.6616, 1.6570, 1.3430, 0.7294, 1.3695], + device='cuda:0'), covar=tensor([0.3501, 0.3586, 0.1734, 0.2502, 0.2748, 0.2872, 0.4598, 0.2409], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0250, 0.0224, 0.0318, 0.0216, 0.0230, 0.0232, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 03:26:55,774 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3044, 1.8160, 2.2926, 2.8087, 2.2540, 1.7553, 1.6798, 2.2870], + device='cuda:0'), covar=tensor([0.4189, 0.3762, 0.1833, 0.3037, 0.3170, 0.3119, 0.4717, 0.2559], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0250, 0.0223, 0.0318, 0.0215, 0.0230, 0.0232, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 03:26:56,267 INFO [finetune.py:976] (0/7) Epoch 12, batch 4600, loss[loss=0.1785, simple_loss=0.2485, pruned_loss=0.05427, over 4896.00 frames. ], tot_loss[loss=0.1904, simple_loss=0.2589, pruned_loss=0.06092, over 954416.99 frames. 
], batch size: 36, lr: 3.63e-03, grad_scale: 32.0 +2023-04-27 03:27:01,304 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67613.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:27:03,142 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3507, 1.1552, 1.5680, 1.5579, 1.2829, 1.1622, 1.3163, 0.7995], + device='cuda:0'), covar=tensor([0.0580, 0.0715, 0.0471, 0.0506, 0.0693, 0.1085, 0.0538, 0.0674], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0070, 0.0070, 0.0066, 0.0074, 0.0095, 0.0075, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 03:27:03,718 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0309, 2.4753, 1.0585, 1.2876, 1.8100, 1.1952, 3.0771, 1.5565], + device='cuda:0'), covar=tensor([0.0688, 0.0519, 0.0746, 0.1320, 0.0488, 0.1007, 0.0241, 0.0650], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0051, 0.0052, 0.0077, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 03:27:16,442 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.29 vs. limit=5.0 +2023-04-27 03:27:20,971 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.683e+02 1.939e+02 2.322e+02 5.503e+02, threshold=3.878e+02, percent-clipped=1.0 +2023-04-27 03:27:29,855 INFO [finetune.py:976] (0/7) Epoch 12, batch 4650, loss[loss=0.1783, simple_loss=0.2269, pruned_loss=0.06488, over 4157.00 frames. ], tot_loss[loss=0.188, simple_loss=0.2559, pruned_loss=0.06007, over 953900.06 frames. ], batch size: 65, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:27:34,149 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=67661.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:27:36,741 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3570, 2.9137, 2.2791, 2.2073, 1.8516, 1.8526, 2.3328, 1.7812], + device='cuda:0'), covar=tensor([0.1518, 0.1537, 0.1375, 0.1585, 0.2142, 0.1813, 0.1011, 0.1821], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0214, 0.0171, 0.0204, 0.0203, 0.0185, 0.0158, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 03:27:46,433 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67680.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:27:54,859 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67692.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:28:04,213 INFO [finetune.py:976] (0/7) Epoch 12, batch 4700, loss[loss=0.192, simple_loss=0.2424, pruned_loss=0.07082, over 4262.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2523, pruned_loss=0.05876, over 951972.92 frames. 
], batch size: 66, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:28:29,588 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4698, 1.7430, 1.6501, 2.3599, 2.4375, 2.0984, 1.9756, 1.8127], + device='cuda:0'), covar=tensor([0.2072, 0.2039, 0.2346, 0.1995, 0.1453, 0.2378, 0.3259, 0.2649], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0319, 0.0354, 0.0296, 0.0334, 0.0317, 0.0306, 0.0363], + device='cuda:0'), out_proj_covar=tensor([6.4143e-05, 6.7084e-05, 7.5984e-05, 6.0864e-05, 6.9794e-05, 6.7274e-05, + 6.5135e-05, 7.7655e-05], device='cuda:0') +2023-04-27 03:28:37,291 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=67739.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:28:38,543 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67741.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:28:40,069 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.659e+02 1.914e+02 2.232e+02 4.498e+02, threshold=3.829e+02, percent-clipped=2.0 +2023-04-27 03:28:51,823 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=67753.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 03:28:59,227 INFO [finetune.py:976] (0/7) Epoch 12, batch 4750, loss[loss=0.1895, simple_loss=0.2617, pruned_loss=0.0586, over 4761.00 frames. ], tot_loss[loss=0.1831, simple_loss=0.2502, pruned_loss=0.05798, over 951457.56 frames. ], batch size: 26, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:29:23,855 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-27 03:29:42,300 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.7339, 4.5514, 3.2432, 5.4301, 4.8142, 4.6757, 2.0056, 4.6809], + device='cuda:0'), covar=tensor([0.1392, 0.0893, 0.3189, 0.0843, 0.4944, 0.1659, 0.5957, 0.2177], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0216, 0.0248, 0.0304, 0.0298, 0.0247, 0.0272, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 03:29:42,892 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=67787.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:30:06,234 INFO [finetune.py:976] (0/7) Epoch 12, batch 4800, loss[loss=0.2721, simple_loss=0.3211, pruned_loss=0.1115, over 4815.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2524, pruned_loss=0.0592, over 951736.51 frames. ], batch size: 40, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:30:23,146 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-27 03:30:32,111 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-27 03:30:35,556 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.720e+02 2.004e+02 2.760e+02 5.659e+02, threshold=4.008e+02, percent-clipped=5.0 +2023-04-27 03:30:43,896 INFO [finetune.py:976] (0/7) Epoch 12, batch 4850, loss[loss=0.1784, simple_loss=0.2423, pruned_loss=0.05726, over 4834.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2541, pruned_loss=0.05901, over 948591.27 frames. 
], batch size: 39, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:30:53,863 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5466, 1.1731, 4.3188, 4.0573, 3.8035, 4.1483, 4.0099, 3.8160], + device='cuda:0'), covar=tensor([0.7264, 0.6485, 0.1065, 0.1815, 0.1168, 0.1469, 0.1471, 0.1453], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0309, 0.0406, 0.0414, 0.0353, 0.0412, 0.0317, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 03:30:54,459 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=67869.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 03:30:55,221 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.52 vs. limit=5.0 +2023-04-27 03:31:17,820 INFO [finetune.py:976] (0/7) Epoch 12, batch 4900, loss[loss=0.173, simple_loss=0.2318, pruned_loss=0.05715, over 3943.00 frames. ], tot_loss[loss=0.1884, simple_loss=0.2566, pruned_loss=0.06015, over 950021.32 frames. ], batch size: 17, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:31:40,684 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9230, 1.2943, 1.6757, 1.9979, 1.7947, 1.3194, 0.8924, 1.4697], + device='cuda:0'), covar=tensor([0.4025, 0.4112, 0.2006, 0.3100, 0.3060, 0.3142, 0.5211, 0.2583], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0248, 0.0222, 0.0315, 0.0214, 0.0228, 0.0231, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 03:31:43,017 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.676e+01 1.571e+02 1.848e+02 2.234e+02 5.872e+02, threshold=3.696e+02, percent-clipped=2.0 +2023-04-27 03:31:43,614 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9982, 0.9701, 1.2338, 1.1236, 0.9558, 0.8802, 0.9973, 0.6081], + device='cuda:0'), covar=tensor([0.0601, 0.0739, 0.0475, 0.0669, 0.0799, 0.1244, 0.0562, 0.0747], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0071, 0.0070, 0.0067, 0.0075, 0.0096, 0.0075, 0.0070], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 03:31:46,052 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0701, 2.5826, 2.1644, 1.9785, 1.4692, 1.4160, 2.3458, 1.4396], + device='cuda:0'), covar=tensor([0.1719, 0.1633, 0.1427, 0.1929, 0.2408, 0.1988, 0.0946, 0.2108], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0213, 0.0170, 0.0204, 0.0202, 0.0184, 0.0157, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 03:31:50,182 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3759, 1.7904, 2.2242, 2.7903, 2.2712, 1.7719, 1.6247, 2.2608], + device='cuda:0'), covar=tensor([0.3355, 0.3390, 0.1653, 0.2479, 0.2693, 0.2733, 0.4335, 0.2249], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0247, 0.0222, 0.0315, 0.0213, 0.0228, 0.0231, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 03:31:51,276 INFO [finetune.py:976] (0/7) Epoch 12, batch 4950, loss[loss=0.1742, simple_loss=0.2544, pruned_loss=0.04704, over 4888.00 frames. ], tot_loss[loss=0.1896, simple_loss=0.2582, pruned_loss=0.06054, over 951724.39 frames. 
], batch size: 43, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:32:09,663 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=67981.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:32:21,167 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8020, 2.0792, 1.9798, 2.1458, 1.9223, 2.1183, 2.0573, 1.9895], + device='cuda:0'), covar=tensor([0.4324, 0.7170, 0.5915, 0.5318, 0.6237, 0.8528, 0.7582, 0.6838], + device='cuda:0'), in_proj_covar=tensor([0.0328, 0.0377, 0.0315, 0.0327, 0.0339, 0.0399, 0.0358, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 03:32:21,759 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-68000.pt +2023-04-27 03:32:26,418 INFO [finetune.py:976] (0/7) Epoch 12, batch 5000, loss[loss=0.1616, simple_loss=0.2219, pruned_loss=0.05061, over 4837.00 frames. ], tot_loss[loss=0.1876, simple_loss=0.2562, pruned_loss=0.0595, over 952226.18 frames. ], batch size: 30, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:32:47,884 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68036.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:32:51,612 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68042.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:32:52,118 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.570e+01 1.647e+02 1.940e+02 2.431e+02 4.879e+02, threshold=3.879e+02, percent-clipped=2.0 +2023-04-27 03:32:55,686 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68048.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 03:32:59,803 INFO [finetune.py:976] (0/7) Epoch 12, batch 5050, loss[loss=0.1567, simple_loss=0.2263, pruned_loss=0.0436, over 4825.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2535, pruned_loss=0.05888, over 954428.14 frames. ], batch size: 25, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:33:22,274 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68087.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 03:33:33,613 INFO [finetune.py:976] (0/7) Epoch 12, batch 5100, loss[loss=0.2376, simple_loss=0.2873, pruned_loss=0.09397, over 4757.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2508, pruned_loss=0.05799, over 955663.95 frames. ], batch size: 54, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:34:09,111 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-27 03:34:11,406 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.851e+01 1.609e+02 1.945e+02 2.272e+02 5.546e+02, threshold=3.889e+02, percent-clipped=1.0 +2023-04-27 03:34:14,591 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68148.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 03:34:17,197 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.90 vs. limit=2.0 +2023-04-27 03:34:18,644 INFO [finetune.py:976] (0/7) Epoch 12, batch 5150, loss[loss=0.192, simple_loss=0.2631, pruned_loss=0.06043, over 4766.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2504, pruned_loss=0.05812, over 953222.08 frames. 
], batch size: 54, lr: 3.63e-03, grad_scale: 16.0 +2023-04-27 03:34:34,038 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68169.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:35:15,176 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9748, 1.9114, 1.7116, 1.5736, 2.1433, 1.6029, 2.5324, 1.5194], + device='cuda:0'), covar=tensor([0.3746, 0.1866, 0.4830, 0.3087, 0.1648, 0.2640, 0.1415, 0.4226], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0344, 0.0426, 0.0354, 0.0378, 0.0379, 0.0372, 0.0416], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 03:35:16,281 INFO [finetune.py:976] (0/7) Epoch 12, batch 5200, loss[loss=0.2529, simple_loss=0.3215, pruned_loss=0.09214, over 4745.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2537, pruned_loss=0.05873, over 953847.20 frames. ], batch size: 59, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:35:34,917 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=68217.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:36:09,273 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.715e+02 2.014e+02 2.436e+02 3.767e+02, threshold=4.027e+02, percent-clipped=0.0 +2023-04-27 03:36:22,261 INFO [finetune.py:976] (0/7) Epoch 12, batch 5250, loss[loss=0.208, simple_loss=0.2763, pruned_loss=0.0699, over 4146.00 frames. ], tot_loss[loss=0.187, simple_loss=0.2556, pruned_loss=0.05915, over 954109.32 frames. ], batch size: 65, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:37:07,992 INFO [finetune.py:976] (0/7) Epoch 12, batch 5300, loss[loss=0.1742, simple_loss=0.2486, pruned_loss=0.04993, over 4820.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2558, pruned_loss=0.05915, over 954479.04 frames. ], batch size: 38, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:37:29,424 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68336.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:37:30,483 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68337.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:37:34,488 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.347e+01 1.659e+02 1.967e+02 2.376e+02 7.370e+02, threshold=3.933e+02, percent-clipped=4.0 +2023-04-27 03:37:36,534 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-27 03:37:37,618 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68348.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:37:41,847 INFO [finetune.py:976] (0/7) Epoch 12, batch 5350, loss[loss=0.1452, simple_loss=0.2092, pruned_loss=0.04061, over 4762.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2555, pruned_loss=0.05874, over 954843.56 frames. 
], batch size: 26, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:37:43,110 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6131, 1.4709, 4.4634, 4.1252, 3.8842, 4.2287, 4.1166, 3.9078], + device='cuda:0'), covar=tensor([0.7303, 0.6228, 0.1073, 0.1882, 0.1260, 0.1972, 0.1535, 0.1645], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0307, 0.0403, 0.0409, 0.0351, 0.0410, 0.0315, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 03:38:00,875 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=68384.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:38:10,026 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=68396.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:38:11,925 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68399.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:38:15,657 INFO [finetune.py:976] (0/7) Epoch 12, batch 5400, loss[loss=0.1789, simple_loss=0.2465, pruned_loss=0.05569, over 4901.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2531, pruned_loss=0.05798, over 952502.30 frames. ], batch size: 36, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:38:41,371 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.609e+02 1.919e+02 2.411e+02 6.706e+02, threshold=3.839e+02, percent-clipped=2.0 +2023-04-27 03:38:41,453 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68443.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 03:38:49,105 INFO [finetune.py:976] (0/7) Epoch 12, batch 5450, loss[loss=0.2022, simple_loss=0.2631, pruned_loss=0.07071, over 4865.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2506, pruned_loss=0.05713, over 952911.86 frames. ], batch size: 31, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:38:52,245 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68460.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:39:25,653 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-27 03:39:27,183 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8632, 2.7389, 2.2040, 3.2844, 2.7930, 2.8252, 1.1537, 2.7383], + device='cuda:0'), covar=tensor([0.1953, 0.1909, 0.3423, 0.2831, 0.3509, 0.2251, 0.5871, 0.2923], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0217, 0.0250, 0.0305, 0.0300, 0.0250, 0.0273, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 03:39:33,462 INFO [finetune.py:976] (0/7) Epoch 12, batch 5500, loss[loss=0.1927, simple_loss=0.2621, pruned_loss=0.06165, over 4859.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2478, pruned_loss=0.05632, over 953960.13 frames. ], batch size: 44, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:39:58,193 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.589e+02 1.966e+02 2.318e+02 5.103e+02, threshold=3.932e+02, percent-clipped=5.0 +2023-04-27 03:40:06,516 INFO [finetune.py:976] (0/7) Epoch 12, batch 5550, loss[loss=0.2376, simple_loss=0.2992, pruned_loss=0.08806, over 4903.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2499, pruned_loss=0.05737, over 953307.57 frames. 
], batch size: 43, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:40:21,069 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68578.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:40:51,462 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8979, 1.6774, 2.0966, 2.3567, 2.0142, 1.8378, 1.9443, 1.9515], + device='cuda:0'), covar=tensor([0.5330, 0.7222, 0.8204, 0.6678, 0.6541, 0.9617, 0.9708, 0.9857], + device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0406, 0.0496, 0.0512, 0.0440, 0.0461, 0.0469, 0.0471], + device='cuda:0'), out_proj_covar=tensor([9.9478e-05, 1.0083e-04, 1.1172e-04, 1.2146e-04, 1.0656e-04, 1.1129e-04, + 1.1218e-04, 1.1262e-04], device='cuda:0') +2023-04-27 03:40:54,926 INFO [finetune.py:976] (0/7) Epoch 12, batch 5600, loss[loss=0.2106, simple_loss=0.2762, pruned_loss=0.07245, over 4936.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2537, pruned_loss=0.059, over 952992.93 frames. ], batch size: 42, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:41:36,492 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68637.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:41:37,645 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68639.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:41:39,929 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3022, 1.6202, 1.5202, 1.7980, 1.6888, 1.8568, 1.4117, 3.6764], + device='cuda:0'), covar=tensor([0.0653, 0.0802, 0.0842, 0.1208, 0.0674, 0.0555, 0.0820, 0.0128], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0058], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 03:41:46,141 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.587e+02 1.869e+02 2.457e+02 5.644e+02, threshold=3.738e+02, percent-clipped=6.0 +2023-04-27 03:41:47,447 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-27 03:41:58,625 INFO [finetune.py:976] (0/7) Epoch 12, batch 5650, loss[loss=0.2257, simple_loss=0.2947, pruned_loss=0.07837, over 4915.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2561, pruned_loss=0.05948, over 953104.64 frames. ], batch size: 36, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:42:01,209 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-27 03:42:33,430 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:42:33,491 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=68685.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:42:45,292 INFO [finetune.py:976] (0/7) Epoch 12, batch 5700, loss[loss=0.1845, simple_loss=0.2261, pruned_loss=0.07142, over 4406.00 frames. ], tot_loss[loss=0.187, simple_loss=0.254, pruned_loss=0.06001, over 938187.61 frames. 
], batch size: 19, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:42:54,862 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.0289, 2.2042, 2.1623, 2.8327, 3.0306, 2.6761, 2.5099, 2.2263], + device='cuda:0'), covar=tensor([0.1114, 0.1655, 0.1655, 0.1307, 0.0869, 0.1288, 0.1962, 0.1836], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0319, 0.0353, 0.0296, 0.0333, 0.0317, 0.0309, 0.0362], + device='cuda:0'), out_proj_covar=tensor([6.4438e-05, 6.7198e-05, 7.5791e-05, 6.0680e-05, 6.9643e-05, 6.7296e-05, + 6.5737e-05, 7.7424e-05], device='cuda:0') +2023-04-27 03:42:58,334 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3275, 1.2514, 1.3377, 1.5426, 1.5977, 1.2823, 0.9976, 1.3979], + device='cuda:0'), covar=tensor([0.0775, 0.1151, 0.0800, 0.0529, 0.0659, 0.0800, 0.0777, 0.0581], + device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0201, 0.0182, 0.0173, 0.0178, 0.0184, 0.0155, 0.0179], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 03:43:01,972 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-12.pt +2023-04-27 03:43:16,427 INFO [finetune.py:976] (0/7) Epoch 13, batch 0, loss[loss=0.206, simple_loss=0.2772, pruned_loss=0.06739, over 4898.00 frames. ], tot_loss[loss=0.206, simple_loss=0.2772, pruned_loss=0.06739, over 4898.00 frames. ], batch size: 36, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:43:16,428 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 03:43:25,066 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3716, 3.3624, 2.6043, 3.8400, 3.4539, 3.4050, 1.6743, 3.3637], + device='cuda:0'), covar=tensor([0.1405, 0.1402, 0.2706, 0.2237, 0.2593, 0.1873, 0.4569, 0.2327], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0216, 0.0248, 0.0304, 0.0298, 0.0248, 0.0271, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 03:43:32,177 INFO [finetune.py:1010] (0/7) Epoch 13, validation: loss=0.1542, simple_loss=0.2264, pruned_loss=0.04102, over 2265189.00 frames. 
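Each training record above follows one fixed shape — `Epoch E, batch B, loss[loss=..., over N frames. ], tot_loss[loss=L, ...]` — with validation records interleaved at epoch boundaries. A minimal sketch for scraping the `tot_loss` values out of a log like this one for offline plotting; the script, its helper names, and the log filename are illustrative assumptions, not part of icefall:

import re
from pathlib import Path

# Matches the "Epoch E, batch B, ... tot_loss[loss=L" shape of the
# finetune.py records in this log. DOTALL lets a record span the hard
# line breaks present in the wrapped log text.
RECORD = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+),.*?"
    r"tot_loss\[loss=(?P<loss>[\d.]+)",
    re.DOTALL,
)

def scrape_tot_loss(log_path):
    # Yield (epoch, batch, tot_loss) for every training record found;
    # "Epoch E, validation:" records do not match and are skipped.
    text = Path(log_path).read_text()
    for m in RECORD.finditer(text):
        yield int(m["epoch"]), int(m["batch"]), float(m["loss"])

if __name__ == "__main__":
    # Filename is an assumption; point this at the log being inspected.
    for epoch, batch, loss in scrape_tot_loss("log-train-2023-04-26-10-07-06-0"):
        print(f"epoch={epoch:>2}  batch={batch:>5}  tot_loss={loss:.4f}")

Feeding the resulting (epoch, batch, loss) triples to any plotting tool gives the smoothed training curve that the per-record `tot_loss` values trace through this file.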
+2023-04-27 03:43:32,177 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 03:43:49,890 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.289e+01 1.578e+02 1.937e+02 2.291e+02 5.419e+02, threshold=3.875e+02, percent-clipped=2.0 +2023-04-27 03:43:49,991 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=68743.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 03:43:51,888 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=68746.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:43:57,268 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68755.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:43:59,149 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6357, 1.6012, 0.6825, 1.3762, 1.7817, 1.5079, 1.4370, 1.4393], + device='cuda:0'), covar=tensor([0.0515, 0.0376, 0.0367, 0.0550, 0.0287, 0.0536, 0.0515, 0.0598], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0037, 0.0050, 0.0037, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-27 03:44:12,761 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6485, 1.4680, 1.7826, 2.0234, 2.1213, 1.6601, 1.3102, 1.7533], + device='cuda:0'), covar=tensor([0.0968, 0.1402, 0.0780, 0.0653, 0.0655, 0.0983, 0.0914, 0.0647], + device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0201, 0.0182, 0.0173, 0.0178, 0.0184, 0.0155, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 03:44:15,624 INFO [finetune.py:976] (0/7) Epoch 13, batch 50, loss[loss=0.1664, simple_loss=0.2308, pruned_loss=0.05103, over 4189.00 frames. ], tot_loss[loss=0.1887, simple_loss=0.2571, pruned_loss=0.06011, over 217360.64 frames. 
], batch size: 66, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:44:21,435 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=68791.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 03:44:22,076 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.4805, 1.3751, 1.4178, 1.0293, 1.4526, 1.2762, 1.7567, 1.3006], + device='cuda:0'), covar=tensor([0.3663, 0.1627, 0.4876, 0.2704, 0.1553, 0.2012, 0.1699, 0.4556], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0342, 0.0426, 0.0354, 0.0378, 0.0377, 0.0371, 0.0416], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 03:44:26,393 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9900, 1.7794, 2.1512, 2.5116, 2.0987, 1.8858, 1.9546, 2.0168], + device='cuda:0'), covar=tensor([0.5526, 0.7666, 0.8197, 0.6650, 0.6367, 0.9789, 1.0432, 1.0236], + device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0407, 0.0499, 0.0514, 0.0442, 0.0462, 0.0470, 0.0473], + device='cuda:0'), out_proj_covar=tensor([9.9960e-05, 1.0106e-04, 1.1222e-04, 1.2180e-04, 1.0698e-04, 1.1149e-04, + 1.1256e-04, 1.1316e-04], device='cuda:0') +2023-04-27 03:44:46,965 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2344, 1.6017, 2.1185, 2.4653, 2.0306, 1.6248, 1.3571, 1.8310], + device='cuda:0'), covar=tensor([0.3271, 0.3358, 0.1636, 0.2370, 0.2834, 0.2666, 0.4232, 0.2168], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0248, 0.0223, 0.0317, 0.0215, 0.0229, 0.0231, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 03:44:48,040 INFO [finetune.py:976] (0/7) Epoch 13, batch 100, loss[loss=0.1568, simple_loss=0.2338, pruned_loss=0.03992, over 4783.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2502, pruned_loss=0.05715, over 381336.84 frames. ], batch size: 29, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:44:55,551 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.308e+02 1.653e+02 1.937e+02 2.262e+02 3.719e+02, threshold=3.874e+02, percent-clipped=0.0 +2023-04-27 03:44:57,625 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 03:45:21,009 INFO [finetune.py:976] (0/7) Epoch 13, batch 150, loss[loss=0.1852, simple_loss=0.2474, pruned_loss=0.0615, over 4900.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2466, pruned_loss=0.05686, over 508860.64 frames. ], batch size: 32, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:45:47,027 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6348, 1.7263, 0.6267, 1.4042, 1.6146, 1.4946, 1.4403, 1.5043], + device='cuda:0'), covar=tensor([0.0514, 0.0373, 0.0395, 0.0558, 0.0294, 0.0524, 0.0516, 0.0588], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0037, 0.0050, 0.0038, 0.0048, 0.0048, 0.0050], + device='cuda:0') +2023-04-27 03:45:53,957 INFO [finetune.py:976] (0/7) Epoch 13, batch 200, loss[loss=0.1776, simple_loss=0.2424, pruned_loss=0.05638, over 4766.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2447, pruned_loss=0.0565, over 608282.05 frames. 
], batch size: 28, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:45:55,117 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=68934.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:46:01,423 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.726e+01 1.554e+02 1.965e+02 2.387e+02 1.026e+03, threshold=3.930e+02, percent-clipped=4.0 +2023-04-27 03:46:31,804 INFO [finetune.py:976] (0/7) Epoch 13, batch 250, loss[loss=0.201, simple_loss=0.2784, pruned_loss=0.0618, over 4866.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2491, pruned_loss=0.05731, over 685122.31 frames. ], batch size: 34, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:47:07,596 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8796, 2.5701, 0.8938, 1.2472, 1.6687, 1.1824, 3.3967, 1.4592], + device='cuda:0'), covar=tensor([0.0994, 0.0849, 0.0989, 0.1769, 0.0770, 0.1425, 0.0454, 0.1014], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0076, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 03:47:36,762 INFO [finetune.py:976] (0/7) Epoch 13, batch 300, loss[loss=0.1722, simple_loss=0.2492, pruned_loss=0.04762, over 4792.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2535, pruned_loss=0.05914, over 747007.13 frames. ], batch size: 51, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:47:47,287 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69041.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:47:48,430 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.203e+02 1.674e+02 1.942e+02 2.375e+02 4.255e+02, threshold=3.885e+02, percent-clipped=1.0 +2023-04-27 03:47:48,556 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69043.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:47:59,629 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6323, 1.3761, 1.7896, 1.8413, 1.4645, 1.3356, 1.4842, 1.0203], + device='cuda:0'), covar=tensor([0.0598, 0.0988, 0.0538, 0.0522, 0.0772, 0.1248, 0.0654, 0.0671], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0071, 0.0071, 0.0067, 0.0075, 0.0096, 0.0075, 0.0070], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 03:48:05,337 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69055.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:48:39,071 INFO [finetune.py:976] (0/7) Epoch 13, batch 350, loss[loss=0.2003, simple_loss=0.2636, pruned_loss=0.06844, over 4837.00 frames. ], tot_loss[loss=0.1882, simple_loss=0.2559, pruned_loss=0.0602, over 792859.56 frames. ], batch size: 47, lr: 3.62e-03, grad_scale: 16.0 +2023-04-27 03:48:58,060 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-27 03:48:59,534 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=69103.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:49:00,192 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69104.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:49:17,727 INFO [finetune.py:976] (0/7) Epoch 13, batch 400, loss[loss=0.1581, simple_loss=0.2396, pruned_loss=0.03831, over 4831.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2553, pruned_loss=0.0591, over 827208.89 frames. 
], batch size: 30, lr: 3.61e-03, grad_scale: 16.0 +2023-04-27 03:49:24,721 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.707e+02 2.109e+02 2.400e+02 4.777e+02, threshold=4.219e+02, percent-clipped=1.0 +2023-04-27 03:49:30,065 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9501, 1.7108, 2.1172, 2.4422, 2.0815, 1.8645, 1.9409, 2.0020], + device='cuda:0'), covar=tensor([0.5843, 0.7911, 0.9139, 0.7333, 0.6561, 1.0828, 1.1269, 1.0238], + device='cuda:0'), in_proj_covar=tensor([0.0413, 0.0407, 0.0497, 0.0516, 0.0441, 0.0463, 0.0469, 0.0473], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 03:49:38,746 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69162.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:49:42,425 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69168.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:49:51,425 INFO [finetune.py:976] (0/7) Epoch 13, batch 450, loss[loss=0.227, simple_loss=0.2782, pruned_loss=0.08795, over 4825.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2541, pruned_loss=0.05893, over 855185.45 frames. ], batch size: 30, lr: 3.61e-03, grad_scale: 16.0 +2023-04-27 03:49:57,549 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9245, 2.5961, 1.9656, 1.8807, 1.3544, 1.4009, 2.0227, 1.2359], + device='cuda:0'), covar=tensor([0.1665, 0.1268, 0.1476, 0.1694, 0.2288, 0.1879, 0.0999, 0.2068], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0214, 0.0170, 0.0204, 0.0204, 0.0185, 0.0157, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 03:49:58,219 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-27 03:50:19,104 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69223.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:50:20,339 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69225.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:50:22,768 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69229.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:50:25,122 INFO [finetune.py:976] (0/7) Epoch 13, batch 500, loss[loss=0.1798, simple_loss=0.2371, pruned_loss=0.06129, over 4144.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2522, pruned_loss=0.05814, over 878094.93 frames. ], batch size: 18, lr: 3.61e-03, grad_scale: 16.0 +2023-04-27 03:50:25,819 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69234.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:50:31,193 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.700e+02 1.950e+02 2.352e+02 3.819e+02, threshold=3.900e+02, percent-clipped=0.0 +2023-04-27 03:50:32,589 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. 
limit=2.0 +2023-04-27 03:50:32,979 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69245.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 03:50:37,668 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7994, 2.0793, 1.9850, 2.1644, 1.9737, 2.1424, 2.0599, 2.0465], + device='cuda:0'), covar=tensor([0.4217, 0.7063, 0.5702, 0.5625, 0.6473, 0.8219, 0.7328, 0.6495], + device='cuda:0'), in_proj_covar=tensor([0.0327, 0.0377, 0.0316, 0.0328, 0.0341, 0.0398, 0.0358, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 03:50:52,433 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69273.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:50:57,802 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=69282.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:50:58,369 INFO [finetune.py:976] (0/7) Epoch 13, batch 550, loss[loss=0.1487, simple_loss=0.2041, pruned_loss=0.04664, over 4693.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.249, pruned_loss=0.05753, over 893859.44 frames. ], batch size: 23, lr: 3.61e-03, grad_scale: 16.0 +2023-04-27 03:51:00,282 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69286.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:51:06,965 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0309, 1.9021, 1.7424, 1.5870, 2.1237, 1.6889, 2.4146, 1.5040], + device='cuda:0'), covar=tensor([0.3092, 0.1436, 0.3861, 0.2406, 0.1212, 0.1930, 0.1280, 0.4071], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0343, 0.0424, 0.0355, 0.0378, 0.0377, 0.0371, 0.0416], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 03:51:14,430 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69306.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 03:51:24,256 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69320.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:51:24,853 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2082, 1.6738, 5.4551, 5.1012, 4.7916, 5.1473, 4.6906, 4.8304], + device='cuda:0'), covar=tensor([0.6317, 0.5867, 0.0775, 0.1644, 0.0949, 0.0889, 0.1103, 0.1299], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0304, 0.0399, 0.0407, 0.0347, 0.0406, 0.0311, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 03:51:32,073 INFO [finetune.py:976] (0/7) Epoch 13, batch 600, loss[loss=0.1776, simple_loss=0.2514, pruned_loss=0.05194, over 4829.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2481, pruned_loss=0.05706, over 903623.07 frames. 
], batch size: 30, lr: 3.61e-03, grad_scale: 16.0 +2023-04-27 03:51:32,802 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69334.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 03:51:37,047 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69341.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:51:38,139 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.090e+02 1.579e+02 1.985e+02 2.306e+02 4.955e+02, threshold=3.970e+02, percent-clipped=1.0 +2023-04-27 03:51:56,864 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69368.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:52:02,376 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.34 vs. limit=5.0 +2023-04-27 03:52:04,776 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69381.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:52:05,905 INFO [finetune.py:976] (0/7) Epoch 13, batch 650, loss[loss=0.1603, simple_loss=0.2415, pruned_loss=0.03955, over 4780.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2518, pruned_loss=0.05829, over 914757.35 frames. ], batch size: 28, lr: 3.61e-03, grad_scale: 16.0 +2023-04-27 03:52:06,028 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9125, 1.8109, 2.0721, 2.2727, 1.7393, 1.5277, 1.7632, 1.1082], + device='cuda:0'), covar=tensor([0.0699, 0.0823, 0.0629, 0.0842, 0.0846, 0.1191, 0.0873, 0.0893], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0070, 0.0071, 0.0067, 0.0075, 0.0096, 0.0075, 0.0070], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 03:52:08,473 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6186, 2.0489, 1.6163, 1.2967, 1.1825, 1.1741, 1.6232, 1.1536], + device='cuda:0'), covar=tensor([0.1707, 0.1361, 0.1496, 0.1967, 0.2396, 0.2084, 0.1098, 0.2038], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0214, 0.0170, 0.0204, 0.0204, 0.0185, 0.0157, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 03:52:09,605 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=69389.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:52:12,669 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69394.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:52:15,688 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69399.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:52:36,503 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69429.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 03:52:44,420 INFO [finetune.py:976] (0/7) Epoch 13, batch 700, loss[loss=0.1739, simple_loss=0.2502, pruned_loss=0.04875, over 4781.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2545, pruned_loss=0.05917, over 924986.61 frames. 
], batch size: 29, lr: 3.61e-03, grad_scale: 16.0
+2023-04-27 03:52:56,107 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.620e+02 1.977e+02 2.375e+02 4.653e+02, threshold=3.954e+02, percent-clipped=1.0
+2023-04-27 03:53:09,380 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69455.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:53:51,165 INFO [finetune.py:976] (0/7) Epoch 13, batch 750, loss[loss=0.2213, simple_loss=0.2911, pruned_loss=0.07581, over 4822.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.254, pruned_loss=0.05894, over 926392.81 frames. ], batch size: 39, lr: 3.61e-03, grad_scale: 16.0
+2023-04-27 03:54:36,333 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69518.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:54:46,332 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69524.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:54:57,619 INFO [finetune.py:976] (0/7) Epoch 13, batch 800, loss[loss=0.1775, simple_loss=0.2474, pruned_loss=0.05386, over 4738.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2531, pruned_loss=0.05844, over 932993.92 frames. ], batch size: 23, lr: 3.61e-03, grad_scale: 16.0
+2023-04-27 03:55:09,401 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.803e+01 1.651e+02 1.914e+02 2.371e+02 3.866e+02, threshold=3.828e+02, percent-clipped=0.0
+2023-04-27 03:55:13,877 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0
+2023-04-27 03:55:35,208 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69581.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:55:36,382 INFO [finetune.py:976] (0/7) Epoch 13, batch 850, loss[loss=0.1637, simple_loss=0.2259, pruned_loss=0.05077, over 4804.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2512, pruned_loss=0.05756, over 936618.37 frames. ], batch size: 25, lr: 3.61e-03, grad_scale: 16.0
+2023-04-27 03:55:47,514 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69601.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 03:55:53,144 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0
+2023-04-27 03:56:07,721 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69629.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 03:56:10,108 INFO [finetune.py:976] (0/7) Epoch 13, batch 900, loss[loss=0.1893, simple_loss=0.2519, pruned_loss=0.06338, over 4941.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2489, pruned_loss=0.05712, over 941190.11 frames. ], batch size: 33, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 03:56:16,239 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.649e+02 1.999e+02 2.386e+02 5.563e+02, threshold=3.997e+02, percent-clipped=1.0
+2023-04-27 03:56:37,648 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=69674.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 03:56:39,338 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69676.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:56:44,052 INFO [finetune.py:976] (0/7) Epoch 13, batch 950, loss[loss=0.1735, simple_loss=0.2444, pruned_loss=0.05136, over 4802.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2479, pruned_loss=0.05712, over 942982.00 frames. ], batch size: 45, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 03:56:53,895 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69699.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:56:55,762 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4963, 1.4349, 1.8530, 1.8031, 1.3594, 1.1581, 1.4734, 0.9218],
+ device='cuda:0'), covar=tensor([0.0601, 0.0697, 0.0460, 0.0667, 0.0860, 0.1304, 0.0694, 0.0818],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0070, 0.0070, 0.0067, 0.0075, 0.0096, 0.0075, 0.0069],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 03:57:01,325 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9448, 1.8316, 2.3146, 2.3276, 1.7655, 1.5198, 1.8987, 1.0692],
+ device='cuda:0'), covar=tensor([0.0672, 0.0965, 0.0520, 0.0858, 0.0923, 0.1302, 0.0827, 0.0973],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0070, 0.0070, 0.0067, 0.0075, 0.0096, 0.0075, 0.0069],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 03:57:11,090 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69724.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:57:18,036 INFO [finetune.py:976] (0/7) Epoch 13, batch 1000, loss[loss=0.1538, simple_loss=0.219, pruned_loss=0.04434, over 4046.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2498, pruned_loss=0.05812, over 945216.82 frames. ], batch size: 17, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 03:57:19,390 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=69735.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 03:57:21,204 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0541, 2.6998, 1.0087, 1.3777, 2.0693, 1.1856, 3.4491, 1.6278],
+ device='cuda:0'), covar=tensor([0.0715, 0.0763, 0.0898, 0.1256, 0.0504, 0.1036, 0.0211, 0.0657],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0075, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 03:57:22,428 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1733, 1.8680, 2.2414, 2.6758, 2.6629, 1.9965, 1.7415, 2.2697],
+ device='cuda:0'), covar=tensor([0.0835, 0.1014, 0.0592, 0.0529, 0.0532, 0.0914, 0.0833, 0.0583],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0204, 0.0185, 0.0175, 0.0180, 0.0187, 0.0158, 0.0182],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 03:57:24,157 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.258e+02 1.699e+02 2.018e+02 2.497e+02 4.277e+02, threshold=4.036e+02, percent-clipped=1.0
+2023-04-27 03:57:26,750 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=69747.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:57:28,600 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=69750.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:57:49,482 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2089, 1.7983, 2.1717, 2.5309, 2.1654, 1.6797, 1.3984, 1.8704],
+ device='cuda:0'), covar=tensor([0.3996, 0.3844, 0.1783, 0.2422, 0.2964, 0.2991, 0.4311, 0.2353],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0247, 0.0222, 0.0315, 0.0213, 0.0228, 0.0230, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 03:57:50,562 INFO [finetune.py:976] (0/7) Epoch 13, batch 1050, loss[loss=0.2011, simple_loss=0.2587, pruned_loss=0.07174, over 4905.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.253, pruned_loss=0.05874, over 945885.19 frames. ], batch size: 37, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 03:58:13,041 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69818.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:58:16,679 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69824.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:58:23,477 INFO [finetune.py:976] (0/7) Epoch 13, batch 1100, loss[loss=0.2111, simple_loss=0.2765, pruned_loss=0.07282, over 4792.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2552, pruned_loss=0.05926, over 948570.28 frames. ], batch size: 29, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 03:58:30,708 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.653e+02 1.942e+02 2.253e+02 5.634e+02, threshold=3.884e+02, percent-clipped=2.0
+2023-04-27 03:58:31,644 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0
+2023-04-27 03:58:50,900 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=69866.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:59:00,297 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=69872.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:59:12,140 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69881.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 03:59:13,756 INFO [finetune.py:976] (0/7) Epoch 13, batch 1150, loss[loss=0.1771, simple_loss=0.2487, pruned_loss=0.05273, over 4788.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2556, pruned_loss=0.05911, over 950160.90 frames. ], batch size: 29, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 03:59:22,322 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8666, 2.4413, 1.9723, 2.2710, 1.7407, 1.9027, 2.1211, 1.5542],
+ device='cuda:0'), covar=tensor([0.2163, 0.1397, 0.0903, 0.1246, 0.3274, 0.1401, 0.1913, 0.2770],
+ device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0314, 0.0227, 0.0287, 0.0316, 0.0266, 0.0257, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([1.1858e-04, 1.2568e-04, 9.0518e-05, 1.1466e-04, 1.2915e-04, 1.0636e-04,
+ 1.0418e-04, 1.0984e-04], device='cuda:0')
+2023-04-27 03:59:41,630 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69901.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 03:59:53,345 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2373, 1.4837, 1.4018, 1.8475, 1.5674, 1.7843, 1.3962, 3.8288],
+ device='cuda:0'), covar=tensor([0.0702, 0.1041, 0.1010, 0.1365, 0.0823, 0.0835, 0.0972, 0.0203],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0057],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 04:00:15,706 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=69929.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:00:15,755 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69929.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 04:00:18,639 INFO [finetune.py:976] (0/7) Epoch 13, batch 1200, loss[loss=0.169, simple_loss=0.2404, pruned_loss=0.04883, over 4784.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2547, pruned_loss=0.05908, over 952408.86 frames. ], batch size: 26, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 04:00:32,302 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.675e+02 1.952e+02 2.332e+02 4.660e+02, threshold=3.905e+02, percent-clipped=1.0
+2023-04-27 04:00:36,031 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=69949.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 04:00:52,475 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=69976.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:00:53,027 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=69977.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:00:57,153 INFO [finetune.py:976] (0/7) Epoch 13, batch 1250, loss[loss=0.1514, simple_loss=0.2131, pruned_loss=0.04489, over 4236.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.253, pruned_loss=0.05863, over 952954.20 frames. ], batch size: 65, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 04:01:10,541 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-70000.pt
+2023-04-27 04:01:25,237 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9751, 1.3593, 1.5113, 1.6435, 2.1428, 1.7456, 1.4232, 1.4525],
+ device='cuda:0'), covar=tensor([0.1488, 0.1701, 0.2052, 0.1408, 0.0819, 0.1651, 0.1928, 0.2228],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0317, 0.0349, 0.0292, 0.0330, 0.0314, 0.0305, 0.0358],
+ device='cuda:0'), out_proj_covar=tensor([6.3930e-05, 6.6686e-05, 7.4830e-05, 5.9755e-05, 6.8946e-05, 6.6737e-05,
+ 6.4786e-05, 7.6530e-05], device='cuda:0')
+2023-04-27 04:01:26,385 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=70024.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:01:26,412 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70024.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:01:27,123 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.09 vs. limit=5.0
+2023-04-27 04:01:30,025 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70030.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 04:01:31,777 INFO [finetune.py:976] (0/7) Epoch 13, batch 1300, loss[loss=0.1432, simple_loss=0.2081, pruned_loss=0.0392, over 4761.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2491, pruned_loss=0.05713, over 950367.50 frames. ], batch size: 59, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 04:01:31,911 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5432, 1.6992, 1.6644, 2.3823, 2.4871, 2.0775, 2.0231, 1.8622],
+ device='cuda:0'), covar=tensor([0.1917, 0.1797, 0.1903, 0.1528, 0.1344, 0.1825, 0.2513, 0.1959],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0317, 0.0349, 0.0292, 0.0330, 0.0314, 0.0305, 0.0358],
+ device='cuda:0'), out_proj_covar=tensor([6.3937e-05, 6.6679e-05, 7.4844e-05, 5.9749e-05, 6.8935e-05, 6.6746e-05,
+ 6.4781e-05, 7.6493e-05], device='cuda:0')
+2023-04-27 04:01:39,421 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.185e+02 1.660e+02 1.869e+02 2.265e+02 4.379e+02, threshold=3.739e+02, percent-clipped=1.0
+2023-04-27 04:01:45,205 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70050.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:01:47,087 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8562, 1.6221, 1.9300, 2.2519, 2.2909, 1.7186, 1.4942, 1.9761],
+ device='cuda:0'), covar=tensor([0.0792, 0.1150, 0.0667, 0.0558, 0.0494, 0.0830, 0.0797, 0.0559],
+ device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0203, 0.0184, 0.0175, 0.0179, 0.0187, 0.0157, 0.0181],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:01:51,284 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3313, 3.3191, 2.4847, 3.8467, 3.3334, 3.2988, 1.3163, 3.3397],
+ device='cuda:0'), covar=tensor([0.1881, 0.1372, 0.3291, 0.2413, 0.4503, 0.1995, 0.6147, 0.2488],
+ device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0214, 0.0249, 0.0303, 0.0297, 0.0246, 0.0270, 0.0266],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:01:56,291 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-04-27 04:01:58,565 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=70072.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:02:05,230 INFO [finetune.py:976] (0/7) Epoch 13, batch 1350, loss[loss=0.1619, simple_loss=0.2312, pruned_loss=0.04629, over 4762.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2499, pruned_loss=0.05797, over 951891.00 frames. ], batch size: 27, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 04:02:16,769 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=70098.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:02:24,488 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70110.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:02:38,384 INFO [finetune.py:976] (0/7) Epoch 13, batch 1400, loss[loss=0.2187, simple_loss=0.2983, pruned_loss=0.0695, over 4874.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2526, pruned_loss=0.05843, over 951610.41 frames. ], batch size: 34, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 04:02:45,036 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.629e+02 2.088e+02 2.462e+02 4.428e+02, threshold=4.176e+02, percent-clipped=4.0
+2023-04-27 04:03:04,699 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70171.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:03:11,826 INFO [finetune.py:976] (0/7) Epoch 13, batch 1450, loss[loss=0.2058, simple_loss=0.284, pruned_loss=0.06377, over 4815.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2547, pruned_loss=0.05862, over 954424.52 frames. ], batch size: 47, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 04:03:45,320 INFO [finetune.py:976] (0/7) Epoch 13, batch 1500, loss[loss=0.216, simple_loss=0.2805, pruned_loss=0.0758, over 4864.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2558, pruned_loss=0.05918, over 953680.22 frames. ], batch size: 31, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 04:03:51,423 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.692e+02 1.982e+02 2.371e+02 3.829e+02, threshold=3.965e+02, percent-clipped=0.0
+2023-04-27 04:04:10,014 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0
+2023-04-27 04:04:10,467 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70260.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:04:17,993 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8714, 2.3817, 1.9782, 2.2234, 1.6182, 2.0231, 2.0980, 1.7126],
+ device='cuda:0'), covar=tensor([0.1981, 0.1074, 0.0789, 0.1215, 0.3222, 0.1063, 0.1799, 0.2533],
+ device='cuda:0'), in_proj_covar=tensor([0.0296, 0.0316, 0.0228, 0.0288, 0.0318, 0.0267, 0.0259, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([1.1952e-04, 1.2622e-04, 9.1045e-05, 1.1509e-04, 1.2964e-04, 1.0696e-04,
+ 1.0501e-04, 1.1000e-04], device='cuda:0')
+2023-04-27 04:04:40,470 INFO [finetune.py:976] (0/7) Epoch 13, batch 1550, loss[loss=0.1669, simple_loss=0.241, pruned_loss=0.04634, over 4870.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2555, pruned_loss=0.05829, over 956239.21 frames. ], batch size: 32, lr: 3.61e-03, grad_scale: 32.0
+2023-04-27 04:05:17,055 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5762, 1.0611, 1.3385, 1.1549, 1.6684, 1.4279, 1.1238, 1.2510],
+ device='cuda:0'), covar=tensor([0.1419, 0.1531, 0.1892, 0.1516, 0.0890, 0.1419, 0.2005, 0.2160],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0317, 0.0350, 0.0291, 0.0330, 0.0314, 0.0304, 0.0359],
+ device='cuda:0'), out_proj_covar=tensor([6.4064e-05, 6.6629e-05, 7.5005e-05, 5.9658e-05, 6.8911e-05, 6.6741e-05,
+ 6.4678e-05, 7.6681e-05], device='cuda:0')
+2023-04-27 04:05:17,649 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=70321.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:05:28,463 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70330.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 04:05:30,176 INFO [finetune.py:976] (0/7) Epoch 13, batch 1600, loss[loss=0.1884, simple_loss=0.2444, pruned_loss=0.06623, over 4909.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2527, pruned_loss=0.05735, over 956189.67 frames. ], batch size: 36, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:05:41,006 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.445e+01 1.654e+02 2.055e+02 2.369e+02 4.520e+02, threshold=4.109e+02, percent-clipped=1.0
+2023-04-27 04:06:03,986 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0
+2023-04-27 04:06:16,247 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=70378.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 04:06:19,132 INFO [finetune.py:976] (0/7) Epoch 13, batch 1650, loss[loss=0.1915, simple_loss=0.2516, pruned_loss=0.06568, over 4833.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2504, pruned_loss=0.05692, over 956548.75 frames. ], batch size: 33, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:06:36,160 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-04-27 04:06:52,935 INFO [finetune.py:976] (0/7) Epoch 13, batch 1700, loss[loss=0.1706, simple_loss=0.2252, pruned_loss=0.05804, over 4326.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2485, pruned_loss=0.0564, over 956916.35 frames. ], batch size: 65, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:06:55,518 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8047, 1.8313, 1.8833, 1.5091, 2.1082, 1.6196, 2.6277, 1.6255],
+ device='cuda:0'), covar=tensor([0.4443, 0.1936, 0.5022, 0.3501, 0.1875, 0.2678, 0.1336, 0.4726],
+ device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0345, 0.0427, 0.0358, 0.0382, 0.0381, 0.0373, 0.0420],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:06:59,065 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.620e+02 1.879e+02 2.417e+02 6.028e+02, threshold=3.758e+02, percent-clipped=1.0
+2023-04-27 04:07:16,018 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70466.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:07:26,839 INFO [finetune.py:976] (0/7) Epoch 13, batch 1750, loss[loss=0.1895, simple_loss=0.2429, pruned_loss=0.06803, over 4126.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2502, pruned_loss=0.05714, over 956783.80 frames. ], batch size: 65, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:08:00,058 INFO [finetune.py:976] (0/7) Epoch 13, batch 1800, loss[loss=0.1355, simple_loss=0.2094, pruned_loss=0.03076, over 4762.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2531, pruned_loss=0.05734, over 956874.49 frames. ], batch size: 27, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:08:06,010 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.577e+02 1.883e+02 2.332e+02 3.915e+02, threshold=3.766e+02, percent-clipped=2.0
+2023-04-27 04:08:21,772 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0795, 1.6425, 2.0537, 2.2900, 2.0590, 1.5783, 1.3314, 1.7720],
+ device='cuda:0'), covar=tensor([0.3210, 0.3256, 0.1621, 0.2515, 0.2604, 0.2656, 0.4651, 0.2263],
+ device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0246, 0.0221, 0.0314, 0.0212, 0.0227, 0.0229, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 04:08:25,061 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6424, 1.3174, 1.7880, 2.1919, 1.8401, 1.6724, 1.7384, 1.6896],
+ device='cuda:0'), covar=tensor([0.5081, 0.7360, 0.7380, 0.6815, 0.6379, 0.8167, 0.8592, 0.8952],
+ device='cuda:0'), in_proj_covar=tensor([0.0411, 0.0406, 0.0495, 0.0515, 0.0441, 0.0462, 0.0468, 0.0472],
+ device='cuda:0'), out_proj_covar=tensor([9.9580e-05, 1.0056e-04, 1.1132e-04, 1.2197e-04, 1.0663e-04, 1.1128e-04,
+ 1.1182e-04, 1.1288e-04], device='cuda:0')
+2023-04-27 04:08:33,360 INFO [finetune.py:976] (0/7) Epoch 13, batch 1850, loss[loss=0.1954, simple_loss=0.2685, pruned_loss=0.0612, over 4903.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2549, pruned_loss=0.05813, over 957218.52 frames. ], batch size: 43, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:08:42,642 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1109, 1.4498, 1.3311, 1.6235, 1.4909, 1.6100, 1.3257, 3.0072],
+ device='cuda:0'), covar=tensor([0.0754, 0.0830, 0.0907, 0.1334, 0.0709, 0.0592, 0.0798, 0.0189],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0058],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 04:08:54,137 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=70616.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:09:06,409 INFO [finetune.py:976] (0/7) Epoch 13, batch 1900, loss[loss=0.1867, simple_loss=0.2655, pruned_loss=0.05393, over 4884.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2558, pruned_loss=0.05863, over 957607.91 frames. ], batch size: 43, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:09:12,479 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.737e+02 2.053e+02 2.436e+02 7.397e+02, threshold=4.106e+02, percent-clipped=5.0
+2023-04-27 04:10:02,034 INFO [finetune.py:976] (0/7) Epoch 13, batch 1950, loss[loss=0.1701, simple_loss=0.246, pruned_loss=0.04708, over 4922.00 frames. ], tot_loss[loss=0.1862, simple_loss=0.2553, pruned_loss=0.05852, over 956665.71 frames. ], batch size: 38, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:10:08,867 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4431, 1.2495, 1.7057, 1.6403, 1.2537, 1.1118, 1.3640, 0.8852],
+ device='cuda:0'), covar=tensor([0.0564, 0.0782, 0.0459, 0.0732, 0.0819, 0.1329, 0.0626, 0.0749],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0069, 0.0070, 0.0066, 0.0074, 0.0096, 0.0074, 0.0069],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 04:10:39,638 INFO [finetune.py:976] (0/7) Epoch 13, batch 2000, loss[loss=0.1576, simple_loss=0.2271, pruned_loss=0.044, over 4816.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2528, pruned_loss=0.05832, over 956279.58 frames. ], batch size: 39, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:10:42,237 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.52 vs. limit=5.0
+2023-04-27 04:10:52,179 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.325e+01 1.560e+02 1.780e+02 2.155e+02 4.897e+02, threshold=3.560e+02, percent-clipped=2.0
+2023-04-27 04:11:15,896 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70766.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:11:28,145 INFO [finetune.py:976] (0/7) Epoch 13, batch 2050, loss[loss=0.1674, simple_loss=0.2368, pruned_loss=0.04898, over 4822.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2503, pruned_loss=0.05781, over 957525.05 frames. ], batch size: 38, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:11:48,331 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=70814.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:11:50,813 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7389, 1.4363, 1.7232, 1.9888, 2.1314, 1.5982, 1.3285, 1.8697],
+ device='cuda:0'), covar=tensor([0.0771, 0.1288, 0.0769, 0.0658, 0.0518, 0.0851, 0.0836, 0.0557],
+ device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0200, 0.0181, 0.0173, 0.0177, 0.0183, 0.0155, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:12:01,813 INFO [finetune.py:976] (0/7) Epoch 13, batch 2100, loss[loss=0.2568, simple_loss=0.3248, pruned_loss=0.09436, over 4856.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2497, pruned_loss=0.05775, over 956511.52 frames. ], batch size: 44, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:12:08,818 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.590e+02 1.945e+02 2.393e+02 5.833e+02, threshold=3.889e+02, percent-clipped=3.0
+2023-04-27 04:12:30,374 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0
+2023-04-27 04:12:34,751 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9016, 1.6553, 2.1572, 2.3868, 1.9683, 1.9080, 2.0346, 2.0157],
+ device='cuda:0'), covar=tensor([0.5689, 0.7892, 0.8177, 0.6895, 0.6965, 0.9765, 0.9947, 0.9394],
+ device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0405, 0.0494, 0.0514, 0.0441, 0.0461, 0.0468, 0.0471],
+ device='cuda:0'), out_proj_covar=tensor([9.9586e-05, 1.0041e-04, 1.1122e-04, 1.2169e-04, 1.0648e-04, 1.1102e-04,
+ 1.1180e-04, 1.1260e-04], device='cuda:0')
+2023-04-27 04:12:35,714 INFO [finetune.py:976] (0/7) Epoch 13, batch 2150, loss[loss=0.2309, simple_loss=0.2971, pruned_loss=0.08239, over 4865.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2527, pruned_loss=0.05873, over 956966.34 frames. ], batch size: 31, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:12:53,110 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.0784, 3.1304, 2.5352, 3.6099, 3.0390, 3.0898, 1.7273, 3.1307],
+ device='cuda:0'), covar=tensor([0.2087, 0.1511, 0.4522, 0.2392, 0.2970, 0.2157, 0.4653, 0.2396],
+ device='cuda:0'), in_proj_covar=tensor([0.0240, 0.0212, 0.0247, 0.0300, 0.0294, 0.0245, 0.0268, 0.0267],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:12:54,983 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4440, 1.3768, 1.6880, 1.6993, 1.3663, 1.1895, 1.4093, 0.8760],
+ device='cuda:0'), covar=tensor([0.0612, 0.0638, 0.0464, 0.0526, 0.0841, 0.1253, 0.0600, 0.0770],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0069, 0.0070, 0.0066, 0.0074, 0.0096, 0.0074, 0.0069],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 04:12:56,782 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=70916.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:12:57,522 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-27 04:13:01,737 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7772, 1.2740, 1.8408, 2.2001, 1.8708, 1.7785, 1.8152, 1.8292],
+ device='cuda:0'), covar=tensor([0.5425, 0.7205, 0.7013, 0.7387, 0.6520, 0.9561, 0.9265, 0.8302],
+ device='cuda:0'), in_proj_covar=tensor([0.0410, 0.0405, 0.0493, 0.0514, 0.0441, 0.0460, 0.0467, 0.0471],
+ device='cuda:0'), out_proj_covar=tensor([9.9466e-05, 1.0027e-04, 1.1110e-04, 1.2168e-04, 1.0649e-04, 1.1090e-04,
+ 1.1175e-04, 1.1253e-04], device='cuda:0')
+2023-04-27 04:13:08,516 INFO [finetune.py:976] (0/7) Epoch 13, batch 2200, loss[loss=0.1862, simple_loss=0.246, pruned_loss=0.06319, over 4922.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2543, pruned_loss=0.05853, over 956559.19 frames. ], batch size: 42, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:13:16,545 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.648e+02 2.003e+02 2.484e+02 4.937e+02, threshold=4.005e+02, percent-clipped=2.0
+2023-04-27 04:13:24,616 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7391, 3.6101, 0.8116, 2.0666, 1.9639, 2.6127, 2.0689, 1.0060],
+ device='cuda:0'), covar=tensor([0.1254, 0.0810, 0.2064, 0.1189, 0.1020, 0.0940, 0.1378, 0.2021],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0246, 0.0139, 0.0121, 0.0132, 0.0152, 0.0117, 0.0120],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 04:13:29,541 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=70964.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:13:41,742 INFO [finetune.py:976] (0/7) Epoch 13, batch 2250, loss[loss=0.2352, simple_loss=0.3094, pruned_loss=0.08047, over 4890.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.256, pruned_loss=0.05879, over 956145.37 frames. ], batch size: 43, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:13:49,254 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=70992.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:13:49,343 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-27 04:14:13,334 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71030.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:14:14,111 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0
+2023-04-27 04:14:14,996 INFO [finetune.py:976] (0/7) Epoch 13, batch 2300, loss[loss=0.2171, simple_loss=0.2817, pruned_loss=0.07625, over 4849.00 frames. ], tot_loss[loss=0.1873, simple_loss=0.2569, pruned_loss=0.05883, over 956917.05 frames. ], batch size: 44, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:14:23,496 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.522e+02 1.891e+02 2.315e+02 3.821e+02, threshold=3.782e+02, percent-clipped=0.0
+2023-04-27 04:14:30,605 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71053.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:15:10,014 INFO [finetune.py:976] (0/7) Epoch 13, batch 2350, loss[loss=0.1574, simple_loss=0.2192, pruned_loss=0.04777, over 4028.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2544, pruned_loss=0.05827, over 954199.36 frames. ], batch size: 17, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:15:20,863 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71091.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:15:34,865 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8561, 1.8573, 1.8064, 1.5564, 1.9902, 1.5576, 2.4923, 1.5389],
+ device='cuda:0'), covar=tensor([0.3572, 0.1728, 0.4556, 0.2677, 0.1554, 0.2360, 0.1381, 0.4381],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0340, 0.0423, 0.0353, 0.0377, 0.0376, 0.0371, 0.0414],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:15:46,745 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2604, 2.4851, 1.0493, 1.4726, 2.0982, 1.3229, 3.4102, 1.8604],
+ device='cuda:0'), covar=tensor([0.0656, 0.0622, 0.0782, 0.1300, 0.0497, 0.1028, 0.0266, 0.0646],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0076, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 04:15:57,381 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-04-27 04:16:15,384 INFO [finetune.py:976] (0/7) Epoch 13, batch 2400, loss[loss=0.1669, simple_loss=0.2301, pruned_loss=0.05181, over 4823.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.252, pruned_loss=0.0575, over 952229.29 frames. ], batch size: 39, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:16:26,847 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.510e+02 1.853e+02 2.211e+02 4.308e+02, threshold=3.705e+02, percent-clipped=1.0
+2023-04-27 04:16:54,088 INFO [finetune.py:976] (0/7) Epoch 13, batch 2450, loss[loss=0.1828, simple_loss=0.2483, pruned_loss=0.05864, over 4761.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2485, pruned_loss=0.05636, over 951735.14 frames. ], batch size: 54, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:17:19,681 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1745, 2.9329, 0.9343, 1.6731, 1.5240, 2.1706, 1.6837, 1.0075],
+ device='cuda:0'), covar=tensor([0.1531, 0.1063, 0.1929, 0.1274, 0.1184, 0.0982, 0.1477, 0.2009],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0248, 0.0140, 0.0122, 0.0134, 0.0153, 0.0118, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 04:17:26,961 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.6187, 4.4302, 3.1930, 5.2216, 4.6479, 4.5434, 2.1261, 4.4991],
+ device='cuda:0'), covar=tensor([0.1383, 0.1071, 0.3221, 0.1061, 0.3418, 0.1716, 0.5870, 0.2392],
+ device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0212, 0.0248, 0.0300, 0.0295, 0.0245, 0.0268, 0.0268],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:17:28,065 INFO [finetune.py:976] (0/7) Epoch 13, batch 2500, loss[loss=0.1727, simple_loss=0.2363, pruned_loss=0.05457, over 4731.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2485, pruned_loss=0.05639, over 951552.53 frames. ], batch size: 23, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:17:34,107 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.448e+01 1.657e+02 1.843e+02 2.125e+02 3.906e+02, threshold=3.687e+02, percent-clipped=2.0
+2023-04-27 04:17:47,612 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2228, 1.5412, 1.3297, 1.4587, 1.4177, 1.2812, 1.3920, 1.0969],
+ device='cuda:0'), covar=tensor([0.1936, 0.1588, 0.1071, 0.1292, 0.3707, 0.1343, 0.2054, 0.2518],
+ device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0313, 0.0227, 0.0285, 0.0315, 0.0266, 0.0257, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([1.1834e-04, 1.2522e-04, 9.0838e-05, 1.1406e-04, 1.2836e-04, 1.0649e-04,
+ 1.0427e-04, 1.0984e-04], device='cuda:0')
+2023-04-27 04:18:01,395 INFO [finetune.py:976] (0/7) Epoch 13, batch 2550, loss[loss=0.1672, simple_loss=0.2506, pruned_loss=0.04189, over 4901.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2528, pruned_loss=0.05779, over 952565.67 frames. ], batch size: 43, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:18:15,028 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4371, 2.4694, 1.9254, 2.1923, 2.4332, 1.9194, 3.1576, 1.6813],
+ device='cuda:0'), covar=tensor([0.3748, 0.1889, 0.4360, 0.3327, 0.1866, 0.2749, 0.2029, 0.4245],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0340, 0.0424, 0.0354, 0.0377, 0.0377, 0.0371, 0.0413],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:18:24,285 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.43 vs. limit=5.0
+2023-04-27 04:18:26,087 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7113, 2.0483, 1.9215, 2.1134, 1.8810, 1.9945, 2.0031, 1.9138],
+ device='cuda:0'), covar=tensor([0.4199, 0.6771, 0.5504, 0.4955, 0.6306, 0.8469, 0.7083, 0.6591],
+ device='cuda:0'), in_proj_covar=tensor([0.0330, 0.0379, 0.0318, 0.0332, 0.0342, 0.0400, 0.0359, 0.0326],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:18:28,445 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8901, 2.8972, 2.1979, 3.3215, 2.9016, 2.9427, 1.1569, 2.8665],
+ device='cuda:0'), covar=tensor([0.2045, 0.1708, 0.3434, 0.2595, 0.3165, 0.2149, 0.5877, 0.2747],
+ device='cuda:0'), in_proj_covar=tensor([0.0239, 0.0211, 0.0246, 0.0298, 0.0293, 0.0243, 0.0266, 0.0266],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:18:34,377 INFO [finetune.py:976] (0/7) Epoch 13, batch 2600, loss[loss=0.2054, simple_loss=0.2619, pruned_loss=0.07449, over 4776.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2541, pruned_loss=0.05761, over 951345.74 frames. ], batch size: 28, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:18:40,522 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.287e+02 1.728e+02 1.986e+02 2.411e+02 4.998e+02, threshold=3.972e+02, percent-clipped=3.0
+2023-04-27 04:18:43,638 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71348.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:19:08,105 INFO [finetune.py:976] (0/7) Epoch 13, batch 2650, loss[loss=0.2163, simple_loss=0.2862, pruned_loss=0.0732, over 4788.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2569, pruned_loss=0.05902, over 952170.20 frames. ], batch size: 51, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:19:10,013 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71386.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:19:41,957 INFO [finetune.py:976] (0/7) Epoch 13, batch 2700, loss[loss=0.174, simple_loss=0.2495, pruned_loss=0.04928, over 4822.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2561, pruned_loss=0.0589, over 953772.10 frames. ], batch size: 38, lr: 3.60e-03, grad_scale: 32.0
+2023-04-27 04:19:48,071 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.223e+02 1.575e+02 1.881e+02 2.224e+02 4.491e+02, threshold=3.762e+02, percent-clipped=1.0
+2023-04-27 04:20:30,301 INFO [finetune.py:976] (0/7) Epoch 13, batch 2750, loss[loss=0.1878, simple_loss=0.2493, pruned_loss=0.06313, over 4911.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2525, pruned_loss=0.05782, over 953109.18 frames. ], batch size: 36, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:21:17,968 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-27 04:21:21,453 INFO [finetune.py:976] (0/7) Epoch 13, batch 2800, loss[loss=0.2057, simple_loss=0.2686, pruned_loss=0.07147, over 4909.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2496, pruned_loss=0.05705, over 953197.33 frames. ], batch size: 32, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:21:33,191 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.066e+02 1.551e+02 1.952e+02 2.336e+02 5.892e+02, threshold=3.903e+02, percent-clipped=4.0
+2023-04-27 04:22:06,642 INFO [finetune.py:976] (0/7) Epoch 13, batch 2850, loss[loss=0.184, simple_loss=0.2424, pruned_loss=0.06281, over 4716.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2482, pruned_loss=0.05635, over 953615.59 frames. ], batch size: 23, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:22:15,905 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4037, 1.7246, 1.5500, 2.2407, 2.3893, 1.9493, 1.8332, 1.6383],
+ device='cuda:0'), covar=tensor([0.1522, 0.1752, 0.1918, 0.1383, 0.1007, 0.2065, 0.2188, 0.1998],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0317, 0.0353, 0.0294, 0.0331, 0.0315, 0.0306, 0.0362],
+ device='cuda:0'), out_proj_covar=tensor([6.4095e-05, 6.6584e-05, 7.5758e-05, 6.0129e-05, 6.9092e-05, 6.6832e-05,
+ 6.4975e-05, 7.7594e-05], device='cuda:0')
+2023-04-27 04:22:18,524 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4005, 1.3572, 1.6999, 1.6629, 1.3205, 1.1221, 1.3629, 0.9146],
+ device='cuda:0'), covar=tensor([0.0635, 0.0676, 0.0449, 0.0555, 0.0809, 0.1057, 0.0653, 0.0698],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0070, 0.0070, 0.0067, 0.0075, 0.0096, 0.0075, 0.0069],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 04:22:27,986 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71616.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:22:40,693 INFO [finetune.py:976] (0/7) Epoch 13, batch 2900, loss[loss=0.179, simple_loss=0.2516, pruned_loss=0.0532, over 4889.00 frames. ], tot_loss[loss=0.183, simple_loss=0.251, pruned_loss=0.05753, over 952197.83 frames. ], batch size: 32, lr: 3.59e-03, grad_scale: 64.0
+2023-04-27 04:22:46,794 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.322e+02 1.703e+02 2.022e+02 2.297e+02 3.791e+02, threshold=4.044e+02, percent-clipped=0.0
+2023-04-27 04:22:49,910 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71648.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:23:09,458 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71677.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:23:13,482 INFO [finetune.py:976] (0/7) Epoch 13, batch 2950, loss[loss=0.2149, simple_loss=0.2948, pruned_loss=0.06749, over 4901.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2558, pruned_loss=0.05902, over 953837.74 frames. ], batch size: 43, lr: 3.59e-03, grad_scale: 64.0
+2023-04-27 04:23:15,368 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=71686.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:23:21,427 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=71696.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:23:33,099 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1015, 1.0611, 1.2440, 1.1911, 1.0224, 0.9062, 0.9841, 0.5085],
+ device='cuda:0'), covar=tensor([0.0614, 0.0690, 0.0570, 0.0565, 0.0785, 0.1292, 0.0529, 0.0847],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0070, 0.0067, 0.0075, 0.0096, 0.0075, 0.0069],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 04:23:45,594 INFO [finetune.py:976] (0/7) Epoch 13, batch 3000, loss[loss=0.2076, simple_loss=0.2755, pruned_loss=0.06979, over 4910.00 frames. ], tot_loss[loss=0.1877, simple_loss=0.2564, pruned_loss=0.05949, over 955040.53 frames. ], batch size: 36, lr: 3.59e-03, grad_scale: 64.0
+2023-04-27 04:23:45,595 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 04:23:56,065 INFO [finetune.py:1010] (0/7) Epoch 13, validation: loss=0.1517, simple_loss=0.224, pruned_loss=0.03973, over 2265189.00 frames.
+2023-04-27 04:23:56,066 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 04:23:56,742 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=71734.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:24:03,110 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.669e+02 1.992e+02 2.305e+02 4.949e+02, threshold=3.985e+02, percent-clipped=1.0
+2023-04-27 04:24:05,687 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7364, 2.2069, 1.6063, 1.4029, 1.3049, 1.2525, 1.6644, 1.2435],
+ device='cuda:0'), covar=tensor([0.1625, 0.1309, 0.1540, 0.1858, 0.2484, 0.1983, 0.1091, 0.2056],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0213, 0.0168, 0.0202, 0.0201, 0.0183, 0.0156, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 04:24:27,544 INFO [finetune.py:976] (0/7) Epoch 13, batch 3050, loss[loss=0.2125, simple_loss=0.2683, pruned_loss=0.07831, over 4895.00 frames. ], tot_loss[loss=0.1872, simple_loss=0.2562, pruned_loss=0.05907, over 954167.55 frames. ], batch size: 32, lr: 3.59e-03, grad_scale: 64.0
+2023-04-27 04:24:41,167 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3771, 1.2787, 4.1434, 3.8679, 3.6869, 4.0159, 3.9408, 3.6868],
+ device='cuda:0'), covar=tensor([0.6912, 0.5920, 0.1038, 0.1678, 0.1026, 0.1379, 0.1270, 0.1346],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0306, 0.0403, 0.0407, 0.0347, 0.0405, 0.0313, 0.0366],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:24:47,908 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-27 04:25:00,526 INFO [finetune.py:976] (0/7) Epoch 13, batch 3100, loss[loss=0.1741, simple_loss=0.2429, pruned_loss=0.05268, over 4894.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.254, pruned_loss=0.05831, over 953654.42 frames. ], batch size: 43, lr: 3.59e-03, grad_scale: 64.0
+2023-04-27 04:25:08,972 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.880e+01 1.511e+02 1.834e+02 2.152e+02 3.267e+02, threshold=3.669e+02, percent-clipped=0.0
+2023-04-27 04:25:54,816 INFO [finetune.py:976] (0/7) Epoch 13, batch 3150, loss[loss=0.1424, simple_loss=0.2084, pruned_loss=0.03826, over 4814.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2509, pruned_loss=0.05743, over 953594.28 frames. ], batch size: 51, lr: 3.59e-03, grad_scale: 64.0
+2023-04-27 04:26:19,586 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9766, 1.7646, 2.0636, 2.3141, 2.3528, 1.8545, 1.5120, 2.1296],
+ device='cuda:0'), covar=tensor([0.0875, 0.1028, 0.0613, 0.0611, 0.0620, 0.0856, 0.0882, 0.0525],
+ device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0202, 0.0183, 0.0174, 0.0179, 0.0184, 0.0156, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:26:48,045 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=71919.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:26:51,721 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5756, 1.4289, 0.5747, 1.2486, 1.3899, 1.4249, 1.3361, 1.3217],
+ device='cuda:0'), covar=tensor([0.0504, 0.0369, 0.0401, 0.0557, 0.0293, 0.0519, 0.0480, 0.0581],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0037, 0.0050, 0.0037, 0.0048, 0.0048, 0.0050],
+ device='cuda:0')
+2023-04-27 04:27:02,054 INFO [finetune.py:976] (0/7) Epoch 13, batch 3200, loss[loss=0.1452, simple_loss=0.2086, pruned_loss=0.04091, over 4767.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2479, pruned_loss=0.05695, over 954932.97 frames. ], batch size: 26, lr: 3.59e-03, grad_scale: 64.0
+2023-04-27 04:27:08,184 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.251e+02 1.591e+02 1.837e+02 2.276e+02 4.272e+02, threshold=3.675e+02, percent-clipped=2.0
+2023-04-27 04:27:26,448 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2896, 1.4888, 1.6450, 1.8118, 1.6685, 1.7449, 1.7319, 1.7225],
+ device='cuda:0'), covar=tensor([0.4391, 0.6050, 0.5299, 0.4878, 0.5957, 0.8137, 0.5697, 0.5548],
+ device='cuda:0'), in_proj_covar=tensor([0.0331, 0.0380, 0.0320, 0.0333, 0.0343, 0.0401, 0.0359, 0.0327],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:27:28,806 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=71972.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:27:33,721 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4192, 1.3558, 3.9873, 3.6974, 3.5274, 3.7290, 3.6916, 3.5024],
+ device='cuda:0'), covar=tensor([0.7187, 0.5738, 0.1030, 0.1826, 0.1082, 0.1905, 0.2234, 0.1476],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0305, 0.0401, 0.0406, 0.0345, 0.0405, 0.0311, 0.0363],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:27:33,763 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=71980.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:27:35,466 INFO [finetune.py:976] (0/7) Epoch 13, batch 3250, loss[loss=0.1724, simple_loss=0.2445, pruned_loss=0.05017, over 4764.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.25, pruned_loss=0.0577, over 955923.13 frames. ], batch size: 26, lr: 3.59e-03, grad_scale: 64.0
+2023-04-27 04:27:47,377 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-72000.pt
+2023-04-27 04:28:10,359 INFO [finetune.py:976] (0/7) Epoch 13, batch 3300, loss[loss=0.1704, simple_loss=0.2492, pruned_loss=0.04577, over 4906.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2546, pruned_loss=0.05901, over 957405.25 frames. ], batch size: 36, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:28:16,991 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.091e+02 1.593e+02 1.877e+02 2.325e+02 3.643e+02, threshold=3.754e+02, percent-clipped=0.0
+2023-04-27 04:28:19,016 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0
+2023-04-27 04:28:22,232 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6738, 3.5633, 2.7447, 4.2837, 3.6691, 3.6910, 1.5158, 3.6531],
+ device='cuda:0'), covar=tensor([0.1780, 0.1358, 0.3162, 0.1548, 0.3559, 0.1665, 0.5810, 0.2495],
+ device='cuda:0'), in_proj_covar=tensor([0.0241, 0.0213, 0.0246, 0.0300, 0.0295, 0.0245, 0.0267, 0.0267],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:28:24,024 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1795, 1.6343, 2.1092, 2.5087, 2.0739, 1.6453, 1.3692, 1.8963],
+ device='cuda:0'), covar=tensor([0.3281, 0.3460, 0.1747, 0.2553, 0.2844, 0.2796, 0.4402, 0.2214],
+ device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0246, 0.0221, 0.0314, 0.0213, 0.0228, 0.0228, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 04:28:43,989 INFO [finetune.py:976] (0/7) Epoch 13, batch 3350, loss[loss=0.1915, simple_loss=0.2601, pruned_loss=0.0615, over 4835.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2562, pruned_loss=0.05932, over 955819.33 frames. ], batch size: 49, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:28:53,335 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-04-27 04:29:15,415 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72129.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:29:16,755 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0
+2023-04-27 04:29:17,728 INFO [finetune.py:976] (0/7) Epoch 13, batch 3400, loss[loss=0.2132, simple_loss=0.2741, pruned_loss=0.07616, over 4826.00 frames. ], tot_loss[loss=0.1889, simple_loss=0.2576, pruned_loss=0.0601, over 951359.87 frames. ], batch size: 33, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:29:24,412 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.571e+02 1.878e+02 2.335e+02 4.983e+02, threshold=3.756e+02, percent-clipped=1.0
+2023-04-27 04:29:43,551 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0142, 1.7357, 2.0662, 2.3471, 2.3684, 1.9177, 1.5145, 2.1587],
+ device='cuda:0'), covar=tensor([0.0892, 0.1191, 0.0698, 0.0666, 0.0616, 0.0839, 0.0882, 0.0591],
+ device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0201, 0.0181, 0.0174, 0.0178, 0.0183, 0.0155, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:29:51,375 INFO [finetune.py:976] (0/7) Epoch 13, batch 3450, loss[loss=0.1757, simple_loss=0.2511, pruned_loss=0.0502, over 4819.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2565, pruned_loss=0.05913, over 951014.74 frames. ], batch size: 30, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:29:55,712 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72190.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:30:03,061 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3626, 3.0387, 0.8166, 1.6961, 2.1986, 1.3401, 3.9513, 1.9910],
+ device='cuda:0'), covar=tensor([0.0667, 0.0684, 0.0941, 0.1206, 0.0541, 0.0989, 0.0187, 0.0618],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0067, 0.0049, 0.0047, 0.0051, 0.0052, 0.0076, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 04:30:16,957 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-04-27 04:30:24,561 INFO [finetune.py:976] (0/7) Epoch 13, batch 3500, loss[loss=0.15, simple_loss=0.218, pruned_loss=0.04103, over 4788.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2527, pruned_loss=0.05789, over 952661.89 frames. ], batch size: 45, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:30:31,095 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.651e+02 2.080e+02 2.507e+02 4.410e+02, threshold=4.160e+02, percent-clipped=6.0
+2023-04-27 04:30:51,285 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.64 vs. limit=2.0
+2023-04-27 04:31:01,849 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-27 04:31:08,045 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72272.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:31:09,838 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72275.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:31:20,405 INFO [finetune.py:976] (0/7) Epoch 13, batch 3550, loss[loss=0.1609, simple_loss=0.2293, pruned_loss=0.04625, over 4930.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2495, pruned_loss=0.05757, over 952473.76 frames. ], batch size: 38, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:32:03,327 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9843, 2.5962, 2.1268, 2.3722, 1.8325, 2.1417, 2.1342, 1.7366],
+ device='cuda:0'), covar=tensor([0.2144, 0.1160, 0.0808, 0.1261, 0.3010, 0.1034, 0.1918, 0.2713],
+ device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0314, 0.0227, 0.0286, 0.0314, 0.0267, 0.0259, 0.0274],
+ device='cuda:0'), out_proj_covar=tensor([1.1911e-04, 1.2550e-04, 9.0716e-05, 1.1445e-04, 1.2811e-04, 1.0676e-04,
+ 1.0479e-04, 1.0947e-04], device='cuda:0')
+2023-04-27 04:32:06,133 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=72320.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:32:16,782 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72326.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:32:26,338 INFO [finetune.py:976] (0/7) Epoch 13, batch 3600, loss[loss=0.2025, simple_loss=0.2692, pruned_loss=0.06793, over 4904.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2474, pruned_loss=0.05639, over 954327.04 frames. ], batch size: 37, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:32:33,138 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.090e+02 1.673e+02 2.029e+02 2.570e+02 4.003e+02, threshold=4.058e+02, percent-clipped=0.0
+2023-04-27 04:32:59,900 INFO [finetune.py:976] (0/7) Epoch 13, batch 3650, loss[loss=0.2031, simple_loss=0.2772, pruned_loss=0.0645, over 4847.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2508, pruned_loss=0.05764, over 955161.80 frames. ], batch size: 49, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:33:02,551 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72387.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:33:09,279 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5475, 2.0357, 2.5446, 3.1815, 2.4296, 1.9313, 1.8665, 2.5042],
+ device='cuda:0'), covar=tensor([0.3625, 0.3345, 0.1693, 0.2795, 0.3022, 0.2855, 0.4136, 0.2157],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0247, 0.0222, 0.0315, 0.0213, 0.0228, 0.0229, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 04:33:14,348 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1178, 1.4548, 1.3628, 1.6597, 1.5322, 1.6288, 1.3101, 2.4972],
+ device='cuda:0'), covar=tensor([0.0607, 0.0800, 0.0808, 0.1161, 0.0646, 0.0462, 0.0741, 0.0222],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0057],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0012, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 04:33:33,724 INFO [finetune.py:976] (0/7) Epoch 13, batch 3700, loss[loss=0.1575, simple_loss=0.2446, pruned_loss=0.03524, over 4781.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2537, pruned_loss=0.05816, over 955448.16 frames. ], batch size: 26, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:33:35,082 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1008, 0.7974, 0.9113, 0.8018, 1.2718, 0.9351, 0.8798, 0.9806],
+ device='cuda:0'), covar=tensor([0.1561, 0.1464, 0.1991, 0.1616, 0.1022, 0.1486, 0.1894, 0.2254],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0319, 0.0354, 0.0295, 0.0332, 0.0316, 0.0309, 0.0365],
+ device='cuda:0'), out_proj_covar=tensor([6.4257e-05, 6.7066e-05, 7.6096e-05, 6.0385e-05, 6.9279e-05, 6.7107e-05,
+ 6.5697e-05, 7.8155e-05], device='cuda:0')
+2023-04-27 04:33:40,456 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.574e+02 1.888e+02 2.267e+02 6.702e+02, threshold=3.776e+02, percent-clipped=3.0
+2023-04-27 04:33:51,047 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0
+2023-04-27 04:34:07,018 INFO [finetune.py:976] (0/7) Epoch 13, batch 3750, loss[loss=0.2125, simple_loss=0.2897, pruned_loss=0.06765, over 4740.00 frames. ], tot_loss[loss=0.1865, simple_loss=0.2551, pruned_loss=0.05895, over 954838.02 frames. ], batch size: 54, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:34:08,311 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72485.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:34:15,670 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6580, 1.2725, 1.8554, 2.1713, 1.8084, 1.6691, 1.7693, 1.6785],
+ device='cuda:0'), covar=tensor([0.4818, 0.6912, 0.6387, 0.5843, 0.5734, 0.7445, 0.7569, 0.9006],
+ device='cuda:0'), in_proj_covar=tensor([0.0409, 0.0405, 0.0493, 0.0510, 0.0439, 0.0460, 0.0467, 0.0471],
+ device='cuda:0'), out_proj_covar=tensor([9.9290e-05, 1.0028e-04, 1.1099e-04, 1.2105e-04, 1.0608e-04, 1.1087e-04,
+ 1.1158e-04, 1.1254e-04], device='cuda:0')
+2023-04-27 04:34:30,204 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0
+2023-04-27 04:34:33,528 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5222, 1.7513, 1.3679, 1.1103, 1.1477, 1.0893, 1.3835, 1.0822],
+ device='cuda:0'), covar=tensor([0.1773, 0.1271, 0.1741, 0.1859, 0.2478, 0.2157, 0.1217, 0.2227],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0215, 0.0170, 0.0205, 0.0203, 0.0185, 0.0158, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 04:34:39,242 INFO [finetune.py:976] (0/7) Epoch 13, batch 3800, loss[loss=0.1558, simple_loss=0.2303, pruned_loss=0.0407, over 4831.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2566, pruned_loss=0.05924, over 955878.14 frames. ], batch size: 30, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:34:46,446 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.671e+02 1.907e+02 2.254e+02 4.045e+02, threshold=3.813e+02, percent-clipped=1.0
+2023-04-27 04:34:47,827 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2208, 1.4960, 1.6684, 1.8700, 1.7439, 1.8745, 1.7486, 1.7074],
+ device='cuda:0'), covar=tensor([0.4224, 0.5756, 0.5255, 0.4871, 0.6108, 0.7534, 0.5505, 0.5413],
+ device='cuda:0'), in_proj_covar=tensor([0.0330, 0.0378, 0.0319, 0.0331, 0.0344, 0.0399, 0.0358, 0.0327],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:35:05,795 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72575.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:35:11,559 INFO [finetune.py:976] (0/7) Epoch 13, batch 3850, loss[loss=0.2074, simple_loss=0.2731, pruned_loss=0.07089, over 4839.00 frames. ], tot_loss[loss=0.1857, simple_loss=0.2548, pruned_loss=0.05835, over 955112.63 frames. ], batch size: 49, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:35:18,165 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3626, 1.6582, 1.6973, 1.8739, 1.7211, 1.8326, 1.8839, 1.7653],
+ device='cuda:0'), covar=tensor([0.4209, 0.5180, 0.4786, 0.4802, 0.5650, 0.7441, 0.5097, 0.4772],
+ device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0379, 0.0320, 0.0331, 0.0345, 0.0400, 0.0359, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:35:24,325 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.73 vs. limit=5.0
+2023-04-27 04:35:37,009 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=72623.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:35:41,196 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6554, 1.5177, 0.8629, 1.2973, 1.5713, 1.5083, 1.3908, 1.4005],
+ device='cuda:0'), covar=tensor([0.0509, 0.0380, 0.0387, 0.0579, 0.0302, 0.0543, 0.0525, 0.0581],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0050, 0.0037, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 04:35:44,026 INFO [finetune.py:976] (0/7) Epoch 13, batch 3900, loss[loss=0.154, simple_loss=0.2155, pruned_loss=0.04624, over 4818.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2526, pruned_loss=0.05791, over 956306.14 frames. ], batch size: 30, lr: 3.59e-03, grad_scale: 32.0
+2023-04-27 04:35:51,957 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.669e+02 1.861e+02 2.404e+02 3.488e+02, threshold=3.722e+02, percent-clipped=0.0
+2023-04-27 04:36:20,899 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8358, 2.3103, 0.8800, 1.1843, 1.5541, 1.0898, 2.4574, 1.3354],
+ device='cuda:0'), covar=tensor([0.0688, 0.0511, 0.0626, 0.1270, 0.0473, 0.1008, 0.0345, 0.0728],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0076, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 04:36:22,218 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4761, 1.6770, 1.8258, 2.0182, 1.7438, 1.9092, 1.9596, 1.8332],
+ device='cuda:0'), covar=tensor([0.4909, 0.6456, 0.5016, 0.4731, 0.6091, 0.7437, 0.6030, 0.5613],
+ device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0379, 0.0320, 0.0331, 0.0345, 0.0399, 0.0359, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 04:36:28,933 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6594, 1.1989, 1.8321, 2.1871, 1.7814, 1.6459, 1.6985, 1.6938],
+ device='cuda:0'), covar=tensor([0.4981, 0.7472, 0.6875, 0.6595, 0.6416, 0.8367, 0.8647, 0.9097],
+ device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0407, 0.0496, 0.0514, 0.0442, 0.0464, 0.0470, 0.0474],
+ device='cuda:0'), out_proj_covar=tensor([9.9810e-05, 1.0086e-04, 1.1162e-04, 1.2190e-04, 1.0685e-04, 1.1180e-04,
+ 1.1227e-04, 1.1320e-04], device='cuda:0')
+2023-04-27 04:36:32,463 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=72682.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:36:33,074 INFO [finetune.py:976] (0/7) Epoch 13, batch 3950, loss[loss=0.1624, simple_loss=0.2441, pruned_loss=0.04038, over 4870.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2486, pruned_loss=0.0559, over 956741.59 frames. ], batch size: 34, lr: 3.58e-03, grad_scale: 32.0
+2023-04-27 04:36:42,347 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7243, 1.3882, 1.3898, 1.4776, 1.9474, 1.5543, 1.2933, 1.2883],
+ device='cuda:0'), covar=tensor([0.1447, 0.1299, 0.1762, 0.1480, 0.0707, 0.1422, 0.2097, 0.2031],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0317, 0.0352, 0.0292, 0.0329, 0.0313, 0.0307, 0.0362],
+ device='cuda:0'), out_proj_covar=tensor([6.3726e-05, 6.6490e-05, 7.5500e-05, 5.9819e-05, 6.8629e-05, 6.6498e-05,
+ 6.5223e-05, 7.7328e-05], device='cuda:0')
+2023-04-27 04:36:50,288 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9296, 0.5828, 0.7050, 0.6408, 1.1055, 0.8571, 0.7195, 0.7905],
+ device='cuda:0'), covar=tensor([0.1553, 0.1586, 0.2035, 0.1693, 0.1094, 0.1440, 0.1861, 0.2142],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0316, 0.0351, 0.0292, 0.0329, 0.0313, 0.0307, 0.0361],
+ device='cuda:0'), out_proj_covar=tensor([6.3645e-05, 6.6422e-05, 7.5420e-05, 5.9743e-05, 6.8556e-05, 6.6429e-05,
+ 6.5153e-05, 7.7232e-05], device='cuda:0')
+2023-04-27 04:37:38,716 INFO [finetune.py:976] (0/7) Epoch 13, batch 4000, loss[loss=0.2011, simple_loss=0.2711, pruned_loss=0.06559, over 4770.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2472, pruned_loss=0.05554, over 955867.75 frames. ], batch size: 26, lr: 3.58e-03, grad_scale: 32.0
+2023-04-27 04:37:57,171 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 7.525e+01 1.494e+02 1.807e+02 2.123e+02 3.249e+02, threshold=3.613e+02, percent-clipped=0.0
+2023-04-27 04:38:27,849 INFO [finetune.py:976] (0/7) Epoch 13, batch 4050, loss[loss=0.216, simple_loss=0.2779, pruned_loss=0.07709, over 4832.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2508, pruned_loss=0.05702, over 954497.76 frames. ], batch size: 33, lr: 3.58e-03, grad_scale: 32.0
+2023-04-27 04:38:29,179 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72785.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:39:01,350 INFO [finetune.py:976] (0/7) Epoch 13, batch 4100, loss[loss=0.1769, simple_loss=0.2586, pruned_loss=0.04761, over 4789.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2543, pruned_loss=0.0587, over 954529.69 frames. ], batch size: 29, lr: 3.58e-03, grad_scale: 32.0
+2023-04-27 04:39:01,407 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=72833.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 04:39:02,219 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-04-27 04:39:09,032 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.644e+02 1.849e+02 2.325e+02 6.848e+02, threshold=3.698e+02, percent-clipped=3.0
+2023-04-27 04:39:30,648 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3434, 2.4158, 1.8287, 2.0918, 2.2776, 1.9762, 3.0882, 1.7119],
+ device='cuda:0'), covar=tensor([0.4336, 0.1836, 0.4970, 0.3483, 0.2199, 0.2624, 0.2015, 0.4610],
+ device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0344, 0.0427, 0.0355, 0.0382, 0.0381, 0.0372, 0.0418],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 04:39:34,765 INFO [finetune.py:976] (0/7) Epoch 13, batch 4150, loss[loss=0.2102, simple_loss=0.275, pruned_loss=0.07276, over 4765.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2562, pruned_loss=0.05935, over 950789.28 frames.
], batch size: 54, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:39:42,044 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72894.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:39:43,799 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0 +2023-04-27 04:39:50,474 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7110, 2.2088, 5.5908, 5.2456, 4.9980, 5.3017, 4.8062, 5.0218], + device='cuda:0'), covar=tensor([0.5404, 0.5331, 0.0844, 0.1473, 0.0997, 0.1634, 0.0950, 0.1333], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0304, 0.0401, 0.0405, 0.0345, 0.0403, 0.0312, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 04:40:06,133 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7246, 3.8073, 2.7076, 4.3577, 3.8953, 3.7355, 1.8926, 3.7653], + device='cuda:0'), covar=tensor([0.1786, 0.1223, 0.3183, 0.1652, 0.3155, 0.1809, 0.5413, 0.2529], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0213, 0.0248, 0.0301, 0.0295, 0.0247, 0.0270, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 04:40:08,515 INFO [finetune.py:976] (0/7) Epoch 13, batch 4200, loss[loss=0.1929, simple_loss=0.2625, pruned_loss=0.0617, over 4800.00 frames. ], tot_loss[loss=0.1867, simple_loss=0.2554, pruned_loss=0.05895, over 949589.99 frames. ], batch size: 25, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:40:15,129 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.034e+02 1.607e+02 1.987e+02 2.411e+02 4.371e+02, threshold=3.975e+02, percent-clipped=4.0 +2023-04-27 04:40:19,180 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3096, 3.2432, 2.6589, 2.9186, 2.2130, 2.5994, 2.8378, 2.0013], + device='cuda:0'), covar=tensor([0.2359, 0.1183, 0.0808, 0.1331, 0.3079, 0.1088, 0.1765, 0.2962], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0312, 0.0228, 0.0285, 0.0315, 0.0267, 0.0257, 0.0273], + device='cuda:0'), out_proj_covar=tensor([1.1856e-04, 1.2464e-04, 9.0932e-05, 1.1400e-04, 1.2852e-04, 1.0694e-04, + 1.0393e-04, 1.0894e-04], device='cuda:0') +2023-04-27 04:40:20,315 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9841, 1.4423, 1.9849, 2.4254, 2.0777, 1.9138, 1.9876, 1.9670], + device='cuda:0'), covar=tensor([0.5225, 0.6915, 0.6990, 0.6379, 0.6763, 0.7724, 0.8063, 0.7775], + device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0407, 0.0496, 0.0512, 0.0442, 0.0462, 0.0469, 0.0474], + device='cuda:0'), out_proj_covar=tensor([9.9810e-05, 1.0089e-04, 1.1160e-04, 1.2158e-04, 1.0676e-04, 1.1141e-04, + 1.1207e-04, 1.1316e-04], device='cuda:0') +2023-04-27 04:40:23,803 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=72955.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:40:30,237 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=72964.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 04:40:35,220 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-27 04:40:41,138 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=72982.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:40:41,680 INFO [finetune.py:976] (0/7) Epoch 13, batch 4250, loss[loss=0.1898, simple_loss=0.2536, pruned_loss=0.06302, over 4897.00 frames. 
], tot_loss[loss=0.1859, simple_loss=0.2538, pruned_loss=0.05903, over 951313.05 frames. ], batch size: 32, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:41:04,724 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-27 04:41:10,632 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73025.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 04:41:13,601 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=73030.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:41:15,356 INFO [finetune.py:976] (0/7) Epoch 13, batch 4300, loss[loss=0.186, simple_loss=0.2571, pruned_loss=0.05751, over 4737.00 frames. ], tot_loss[loss=0.184, simple_loss=0.2515, pruned_loss=0.05825, over 950963.88 frames. ], batch size: 23, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:41:20,355 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6858, 1.2221, 1.7211, 2.1917, 1.8292, 1.6607, 1.7430, 1.6834], + device='cuda:0'), covar=tensor([0.4731, 0.6604, 0.6862, 0.6182, 0.5727, 0.8012, 0.7686, 0.8750], + device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0407, 0.0496, 0.0512, 0.0442, 0.0463, 0.0469, 0.0474], + device='cuda:0'), out_proj_covar=tensor([9.9925e-05, 1.0089e-04, 1.1166e-04, 1.2167e-04, 1.0681e-04, 1.1161e-04, + 1.1213e-04, 1.1321e-04], device='cuda:0') +2023-04-27 04:41:20,921 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5540, 2.1175, 1.8348, 2.0254, 1.4848, 1.7403, 1.7891, 1.4096], + device='cuda:0'), covar=tensor([0.1903, 0.1102, 0.0813, 0.0945, 0.3128, 0.1121, 0.1599, 0.2346], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0309, 0.0226, 0.0284, 0.0312, 0.0265, 0.0254, 0.0270], + device='cuda:0'), out_proj_covar=tensor([1.1755e-04, 1.2311e-04, 9.0158e-05, 1.1330e-04, 1.2721e-04, 1.0589e-04, + 1.0303e-04, 1.0794e-04], device='cuda:0') +2023-04-27 04:41:22,015 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.560e+02 1.991e+02 2.397e+02 3.754e+02, threshold=3.982e+02, percent-clipped=0.0 +2023-04-27 04:41:59,414 INFO [finetune.py:976] (0/7) Epoch 13, batch 4350, loss[loss=0.1648, simple_loss=0.234, pruned_loss=0.04783, over 4743.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2488, pruned_loss=0.05743, over 952004.92 frames. ], batch size: 54, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:42:38,502 INFO [finetune.py:976] (0/7) Epoch 13, batch 4400, loss[loss=0.2214, simple_loss=0.2863, pruned_loss=0.07823, over 4768.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2484, pruned_loss=0.05712, over 950369.98 frames. ], batch size: 28, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:42:47,735 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.074e+02 1.646e+02 2.036e+02 2.477e+02 5.410e+02, threshold=4.072e+02, percent-clipped=5.0 +2023-04-27 04:43:39,918 INFO [finetune.py:976] (0/7) Epoch 13, batch 4450, loss[loss=0.1836, simple_loss=0.2549, pruned_loss=0.05615, over 4899.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2506, pruned_loss=0.05716, over 951973.27 frames. ], batch size: 35, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:44:00,731 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-27 04:44:30,377 INFO [finetune.py:976] (0/7) Epoch 13, batch 4500, loss[loss=0.194, simple_loss=0.2609, pruned_loss=0.06356, over 4866.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2525, pruned_loss=0.05755, over 952994.70 frames. 
], batch size: 31, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:44:37,122 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.883e+01 1.575e+02 1.982e+02 2.231e+02 3.715e+02, threshold=3.964e+02, percent-clipped=0.0 +2023-04-27 04:44:40,826 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73250.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:45:04,241 INFO [finetune.py:976] (0/7) Epoch 13, batch 4550, loss[loss=0.1622, simple_loss=0.2374, pruned_loss=0.04347, over 4862.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2542, pruned_loss=0.05779, over 953561.92 frames. ], batch size: 34, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:45:10,372 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73293.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:45:28,143 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73320.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 04:45:37,744 INFO [finetune.py:976] (0/7) Epoch 13, batch 4600, loss[loss=0.206, simple_loss=0.2695, pruned_loss=0.07123, over 4820.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2544, pruned_loss=0.05832, over 954621.98 frames. ], batch size: 38, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:45:44,461 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.640e+02 1.996e+02 2.306e+02 3.998e+02, threshold=3.993e+02, percent-clipped=1.0 +2023-04-27 04:45:50,674 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73354.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:46:11,138 INFO [finetune.py:976] (0/7) Epoch 13, batch 4650, loss[loss=0.1324, simple_loss=0.2064, pruned_loss=0.0292, over 4756.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2527, pruned_loss=0.05779, over 956273.10 frames. ], batch size: 27, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:46:20,267 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-27 04:46:43,573 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 04:46:44,503 INFO [finetune.py:976] (0/7) Epoch 13, batch 4700, loss[loss=0.1645, simple_loss=0.2417, pruned_loss=0.0436, over 4778.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2499, pruned_loss=0.05732, over 955541.12 frames. ], batch size: 26, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:46:51,527 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.646e+02 2.000e+02 2.343e+02 4.660e+02, threshold=4.000e+02, percent-clipped=2.0 +2023-04-27 04:47:19,626 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6571, 1.7818, 2.0190, 2.0729, 1.9731, 2.1407, 2.1739, 2.1185], + device='cuda:0'), covar=tensor([0.4066, 0.5882, 0.5071, 0.5081, 0.5502, 0.7162, 0.5252, 0.5324], + device='cuda:0'), in_proj_covar=tensor([0.0328, 0.0373, 0.0315, 0.0326, 0.0338, 0.0395, 0.0354, 0.0322], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 04:47:33,205 INFO [finetune.py:976] (0/7) Epoch 13, batch 4750, loss[loss=0.1832, simple_loss=0.2445, pruned_loss=0.06095, over 4709.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.248, pruned_loss=0.05679, over 954279.56 frames. ], batch size: 23, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:48:17,023 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. 
limit=2.0 +2023-04-27 04:48:28,607 INFO [finetune.py:976] (0/7) Epoch 13, batch 4800, loss[loss=0.2178, simple_loss=0.2892, pruned_loss=0.07319, over 4753.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2494, pruned_loss=0.05739, over 952209.25 frames. ], batch size: 59, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:48:36,794 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.600e+02 1.891e+02 2.207e+02 3.425e+02, threshold=3.783e+02, percent-clipped=0.0 +2023-04-27 04:48:41,052 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73550.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:49:02,204 INFO [finetune.py:976] (0/7) Epoch 13, batch 4850, loss[loss=0.1826, simple_loss=0.2678, pruned_loss=0.04873, over 4802.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2534, pruned_loss=0.05886, over 951899.51 frames. ], batch size: 45, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:49:12,451 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73590.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:49:22,936 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=73598.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:49:53,452 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73620.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 04:50:05,528 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-27 04:50:13,627 INFO [finetune.py:976] (0/7) Epoch 13, batch 4900, loss[loss=0.1725, simple_loss=0.2412, pruned_loss=0.05189, over 4761.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.255, pruned_loss=0.05915, over 953149.67 frames. ], batch size: 26, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:50:28,449 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.796e+02 2.177e+02 2.620e+02 4.604e+02, threshold=4.354e+02, percent-clipped=4.0 +2023-04-27 04:50:38,418 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73649.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:50:39,258 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.91 vs. limit=5.0 +2023-04-27 04:50:39,753 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73651.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:50:55,036 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=73668.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 04:50:55,870 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 04:51:04,150 INFO [finetune.py:976] (0/7) Epoch 13, batch 4950, loss[loss=0.1803, simple_loss=0.2608, pruned_loss=0.04986, over 4775.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2559, pruned_loss=0.05886, over 953843.13 frames. ], batch size: 26, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:51:36,571 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=73732.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:51:37,095 INFO [finetune.py:976] (0/7) Epoch 13, batch 5000, loss[loss=0.1563, simple_loss=0.235, pruned_loss=0.03875, over 4900.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2549, pruned_loss=0.0585, over 954433.43 frames. 
], batch size: 43, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:51:45,204 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.282e+02 1.739e+02 2.081e+02 2.360e+02 4.914e+02, threshold=4.162e+02, percent-clipped=1.0 +2023-04-27 04:51:51,868 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-27 04:52:11,152 INFO [finetune.py:976] (0/7) Epoch 13, batch 5050, loss[loss=0.1328, simple_loss=0.1994, pruned_loss=0.03305, over 4811.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2529, pruned_loss=0.05822, over 953390.90 frames. ], batch size: 25, lr: 3.58e-03, grad_scale: 32.0 +2023-04-27 04:52:17,803 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=73793.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:52:56,561 INFO [finetune.py:976] (0/7) Epoch 13, batch 5100, loss[loss=0.1419, simple_loss=0.2143, pruned_loss=0.03473, over 4746.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2495, pruned_loss=0.05696, over 954615.44 frames. ], batch size: 23, lr: 3.57e-03, grad_scale: 32.0 +2023-04-27 04:53:01,559 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.45 vs. limit=5.0 +2023-04-27 04:53:02,056 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6055, 3.2681, 1.0448, 1.8319, 2.5015, 1.7131, 4.5392, 2.3775], + device='cuda:0'), covar=tensor([0.0613, 0.0697, 0.0947, 0.1394, 0.0542, 0.1039, 0.0167, 0.0618], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0065, 0.0048, 0.0046, 0.0050, 0.0051, 0.0075, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 04:53:03,639 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.578e+02 1.847e+02 2.242e+02 3.779e+02, threshold=3.694e+02, percent-clipped=0.0 +2023-04-27 04:53:44,070 INFO [finetune.py:976] (0/7) Epoch 13, batch 5150, loss[loss=0.2315, simple_loss=0.2876, pruned_loss=0.08772, over 4896.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2495, pruned_loss=0.05725, over 953555.83 frames. ], batch size: 35, lr: 3.57e-03, grad_scale: 32.0 +2023-04-27 04:54:18,052 INFO [finetune.py:976] (0/7) Epoch 13, batch 5200, loss[loss=0.1968, simple_loss=0.2652, pruned_loss=0.06419, over 4901.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.254, pruned_loss=0.05913, over 953383.78 frames. ], batch size: 35, lr: 3.57e-03, grad_scale: 32.0 +2023-04-27 04:54:24,749 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.235e+02 1.597e+02 1.940e+02 2.333e+02 4.411e+02, threshold=3.880e+02, percent-clipped=2.0 +2023-04-27 04:54:26,019 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=73946.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:54:28,353 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=73949.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:55:07,559 INFO [finetune.py:976] (0/7) Epoch 13, batch 5250, loss[loss=0.1325, simple_loss=0.2107, pruned_loss=0.0272, over 4736.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2538, pruned_loss=0.0582, over 953447.92 frames. 
], batch size: 23, lr: 3.57e-03, grad_scale: 32.0 +2023-04-27 04:55:16,184 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=73997.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:55:18,588 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-74000.pt +2023-04-27 04:55:41,324 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-27 04:55:42,308 INFO [finetune.py:976] (0/7) Epoch 13, batch 5300, loss[loss=0.2199, simple_loss=0.2813, pruned_loss=0.0793, over 4870.00 frames. ], tot_loss[loss=0.1874, simple_loss=0.2565, pruned_loss=0.05914, over 955615.56 frames. ], batch size: 34, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 04:55:46,772 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-27 04:55:54,435 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.148e+02 1.549e+02 1.803e+02 2.231e+02 3.853e+02, threshold=3.607e+02, percent-clipped=0.0 +2023-04-27 04:56:19,416 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74064.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:56:36,840 INFO [finetune.py:976] (0/7) Epoch 13, batch 5350, loss[loss=0.2109, simple_loss=0.2794, pruned_loss=0.07117, over 4878.00 frames. ], tot_loss[loss=0.1875, simple_loss=0.2569, pruned_loss=0.05904, over 954884.44 frames. ], batch size: 32, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 04:56:39,870 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74088.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:56:44,119 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8664, 2.4360, 2.9306, 3.3532, 3.2052, 2.7399, 2.1920, 2.9409], + device='cuda:0'), covar=tensor([0.0793, 0.0992, 0.0541, 0.0526, 0.0551, 0.0828, 0.0777, 0.0541], + device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0203, 0.0183, 0.0174, 0.0178, 0.0183, 0.0156, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 04:57:05,233 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74125.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:57:10,417 INFO [finetune.py:976] (0/7) Epoch 13, batch 5400, loss[loss=0.1633, simple_loss=0.2209, pruned_loss=0.05289, over 4384.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2533, pruned_loss=0.05781, over 953678.00 frames. ], batch size: 19, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 04:57:17,119 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.047e+02 1.609e+02 1.962e+02 2.381e+02 6.341e+02, threshold=3.923e+02, percent-clipped=2.0 +2023-04-27 04:57:25,193 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-04-27 04:57:43,730 INFO [finetune.py:976] (0/7) Epoch 13, batch 5450, loss[loss=0.1399, simple_loss=0.2137, pruned_loss=0.03305, over 4798.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.251, pruned_loss=0.05665, over 955305.48 frames. ], batch size: 29, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 04:58:35,027 INFO [finetune.py:976] (0/7) Epoch 13, batch 5500, loss[loss=0.196, simple_loss=0.2703, pruned_loss=0.06088, over 4803.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2478, pruned_loss=0.05604, over 957450.43 frames. 
], batch size: 45, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 04:58:39,448 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74240.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:58:42,297 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.578e+02 1.994e+02 2.336e+02 4.147e+02, threshold=3.989e+02, percent-clipped=1.0 +2023-04-27 04:58:43,042 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2691, 1.6642, 2.1529, 2.6466, 2.1122, 1.6088, 1.4318, 1.9265], + device='cuda:0'), covar=tensor([0.3306, 0.3304, 0.1698, 0.2489, 0.2791, 0.2779, 0.4291, 0.2189], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0247, 0.0222, 0.0315, 0.0216, 0.0229, 0.0230, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 04:58:43,595 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74246.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:58:49,272 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-27 04:59:08,628 INFO [finetune.py:976] (0/7) Epoch 13, batch 5550, loss[loss=0.2479, simple_loss=0.3201, pruned_loss=0.0878, over 4838.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2501, pruned_loss=0.05744, over 957409.35 frames. ], batch size: 47, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 04:59:15,396 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=74294.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:59:20,250 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74301.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 04:59:27,563 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5570, 1.8149, 1.7366, 2.4213, 2.5832, 2.1503, 2.0075, 1.8431], + device='cuda:0'), covar=tensor([0.1684, 0.1717, 0.1945, 0.1395, 0.1306, 0.1647, 0.2192, 0.2085], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0316, 0.0352, 0.0292, 0.0330, 0.0314, 0.0307, 0.0362], + device='cuda:0'), out_proj_covar=tensor([6.3925e-05, 6.6456e-05, 7.5360e-05, 5.9764e-05, 6.8728e-05, 6.6701e-05, + 6.5046e-05, 7.7491e-05], device='cuda:0') +2023-04-27 04:59:39,165 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8581, 2.5882, 1.8336, 1.9707, 1.3472, 1.3607, 1.9705, 1.2055], + device='cuda:0'), covar=tensor([0.2049, 0.1656, 0.1739, 0.1996, 0.2752, 0.2324, 0.1130, 0.2414], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0213, 0.0169, 0.0204, 0.0202, 0.0184, 0.0157, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 04:59:39,644 INFO [finetune.py:976] (0/7) Epoch 13, batch 5600, loss[loss=0.1867, simple_loss=0.264, pruned_loss=0.05463, over 4754.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2534, pruned_loss=0.0577, over 956510.82 frames. ], batch size: 59, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 04:59:46,068 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.613e+02 1.797e+02 2.128e+02 3.644e+02, threshold=3.594e+02, percent-clipped=1.0 +2023-04-27 04:59:52,698 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-27 05:00:16,162 INFO [finetune.py:976] (0/7) Epoch 13, batch 5650, loss[loss=0.2315, simple_loss=0.2973, pruned_loss=0.08282, over 4825.00 frames. 
], tot_loss[loss=0.1862, simple_loss=0.2556, pruned_loss=0.05842, over 956078.94 frames. ], batch size: 40, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 05:00:25,012 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74388.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:00:44,664 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74420.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:00:52,320 INFO [finetune.py:976] (0/7) Epoch 13, batch 5700, loss[loss=0.1461, simple_loss=0.2037, pruned_loss=0.04421, over 4062.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2518, pruned_loss=0.05769, over 937969.92 frames. ], batch size: 17, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 05:00:54,133 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=74436.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:00:58,755 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.564e+02 1.898e+02 2.359e+02 3.680e+02, threshold=3.797e+02, percent-clipped=1.0 +2023-04-27 05:01:09,143 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-13.pt +2023-04-27 05:01:23,649 INFO [finetune.py:976] (0/7) Epoch 14, batch 0, loss[loss=0.1635, simple_loss=0.2491, pruned_loss=0.03897, over 4897.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2491, pruned_loss=0.03897, over 4897.00 frames. ], batch size: 43, lr: 3.57e-03, grad_scale: 64.0 +2023-04-27 05:01:23,650 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 05:01:45,229 INFO [finetune.py:1010] (0/7) Epoch 14, validation: loss=0.1535, simple_loss=0.226, pruned_loss=0.04054, over 2265189.00 frames. +2023-04-27 05:01:45,229 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 05:01:57,184 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5119, 1.6071, 0.6088, 1.2218, 1.5582, 1.3677, 1.2703, 1.3007], + device='cuda:0'), covar=tensor([0.0531, 0.0364, 0.0405, 0.0567, 0.0284, 0.0523, 0.0504, 0.0596], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0038, 0.0050, 0.0037, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 05:02:18,139 INFO [finetune.py:976] (0/7) Epoch 14, batch 50, loss[loss=0.1502, simple_loss=0.2235, pruned_loss=0.03849, over 4758.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.2527, pruned_loss=0.0578, over 216581.28 frames. ], batch size: 26, lr: 3.57e-03, grad_scale: 32.0 +2023-04-27 05:02:41,160 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.646e+01 1.673e+02 1.901e+02 2.437e+02 4.267e+02, threshold=3.802e+02, percent-clipped=2.0 +2023-04-27 05:02:51,801 INFO [finetune.py:976] (0/7) Epoch 14, batch 100, loss[loss=0.157, simple_loss=0.2165, pruned_loss=0.04875, over 4886.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2478, pruned_loss=0.0566, over 380999.04 frames. ], batch size: 32, lr: 3.57e-03, grad_scale: 32.0 +2023-04-27 05:03:09,801 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.87 vs. 
limit=5.0 +2023-04-27 05:03:29,424 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74595.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:03:30,034 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74596.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:03:51,942 INFO [finetune.py:976] (0/7) Epoch 14, batch 150, loss[loss=0.1549, simple_loss=0.2203, pruned_loss=0.04471, over 4899.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2447, pruned_loss=0.05632, over 508238.62 frames. ], batch size: 35, lr: 3.57e-03, grad_scale: 32.0 +2023-04-27 05:04:13,490 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.18 vs. limit=5.0 +2023-04-27 05:04:20,052 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.548e+02 1.852e+02 2.253e+02 4.630e+02, threshold=3.704e+02, percent-clipped=3.0 +2023-04-27 05:04:22,684 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-27 05:04:27,505 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74656.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:04:31,408 INFO [finetune.py:976] (0/7) Epoch 14, batch 200, loss[loss=0.1578, simple_loss=0.2223, pruned_loss=0.04667, over 4905.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2426, pruned_loss=0.05552, over 606427.17 frames. ], batch size: 35, lr: 3.57e-03, grad_scale: 32.0 +2023-04-27 05:04:44,968 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1893, 2.7185, 1.2227, 1.5298, 2.0648, 1.4155, 3.2112, 1.7789], + device='cuda:0'), covar=tensor([0.0605, 0.0823, 0.0787, 0.1116, 0.0454, 0.0863, 0.0229, 0.0615], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0075, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 05:04:50,484 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7235, 2.0998, 1.0416, 1.3821, 2.1252, 1.5578, 1.5148, 1.5683], + device='cuda:0'), covar=tensor([0.0517, 0.0334, 0.0341, 0.0589, 0.0260, 0.0542, 0.0502, 0.0570], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 05:04:58,316 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5368, 1.5766, 0.7542, 1.2290, 1.5540, 1.3977, 1.2834, 1.3470], + device='cuda:0'), covar=tensor([0.0503, 0.0379, 0.0387, 0.0609, 0.0304, 0.0535, 0.0509, 0.0584], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0038, 0.0051, 0.0037, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 05:05:05,314 INFO [finetune.py:976] (0/7) Epoch 14, batch 250, loss[loss=0.1254, simple_loss=0.1994, pruned_loss=0.02575, over 4752.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2468, pruned_loss=0.05678, over 679786.35 frames. 
], batch size: 27, lr: 3.57e-03, grad_scale: 32.0 +2023-04-27 05:05:11,818 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74720.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:05:39,271 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.695e+01 1.601e+02 1.840e+02 2.318e+02 4.456e+02, threshold=3.680e+02, percent-clipped=1.0 +2023-04-27 05:05:49,927 INFO [finetune.py:976] (0/7) Epoch 14, batch 300, loss[loss=0.1293, simple_loss=0.1937, pruned_loss=0.03242, over 4722.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2518, pruned_loss=0.05881, over 741017.48 frames. ], batch size: 23, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:05:50,061 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7243, 1.3937, 1.3185, 1.5653, 1.9463, 1.5519, 1.3506, 1.2569], + device='cuda:0'), covar=tensor([0.1672, 0.1485, 0.1739, 0.1636, 0.0870, 0.1527, 0.2213, 0.2036], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0316, 0.0351, 0.0292, 0.0329, 0.0314, 0.0305, 0.0362], + device='cuda:0'), out_proj_covar=tensor([6.3907e-05, 6.6354e-05, 7.5146e-05, 5.9711e-05, 6.8594e-05, 6.6543e-05, + 6.4668e-05, 7.7461e-05], device='cuda:0') +2023-04-27 05:05:54,759 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=74768.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:06:23,249 INFO [finetune.py:976] (0/7) Epoch 14, batch 350, loss[loss=0.2013, simple_loss=0.2744, pruned_loss=0.06413, over 4906.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2543, pruned_loss=0.05895, over 789617.92 frames. ], batch size: 37, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:07:09,533 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.682e+02 1.913e+02 2.290e+02 4.465e+02, threshold=3.826e+02, percent-clipped=1.0 +2023-04-27 05:07:25,449 INFO [finetune.py:976] (0/7) Epoch 14, batch 400, loss[loss=0.2268, simple_loss=0.3133, pruned_loss=0.07011, over 4877.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2548, pruned_loss=0.05822, over 827906.95 frames. ], batch size: 43, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:07:47,524 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. limit=5.0 +2023-04-27 05:07:49,822 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=74896.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:07:57,754 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74909.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:07:59,304 INFO [finetune.py:976] (0/7) Epoch 14, batch 450, loss[loss=0.1977, simple_loss=0.2627, pruned_loss=0.06638, over 4896.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2522, pruned_loss=0.05627, over 855440.49 frames. ], batch size: 43, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:08:37,890 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=74944.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:08:38,411 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.611e+01 1.647e+02 1.819e+02 2.107e+02 4.773e+02, threshold=3.638e+02, percent-clipped=2.0 +2023-04-27 05:08:42,162 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=74951.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:08:53,659 INFO [finetune.py:976] (0/7) Epoch 14, batch 500, loss[loss=0.1431, simple_loss=0.2136, pruned_loss=0.03634, over 4781.00 frames. 
], tot_loss[loss=0.1801, simple_loss=0.2494, pruned_loss=0.05537, over 877529.18 frames. ], batch size: 26, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:08:56,090 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=74964.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:08:56,157 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-27 05:08:59,768 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=74970.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:09:03,782 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0661, 1.6513, 1.4830, 1.8319, 1.7567, 1.9795, 1.4377, 3.6149], + device='cuda:0'), covar=tensor([0.0643, 0.0805, 0.0781, 0.1136, 0.0620, 0.0551, 0.0739, 0.0148], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 05:09:04,436 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7203, 1.9494, 0.7885, 1.4354, 1.9294, 1.5750, 1.5138, 1.5302], + device='cuda:0'), covar=tensor([0.0509, 0.0360, 0.0383, 0.0608, 0.0264, 0.0559, 0.0558, 0.0606], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 05:09:06,925 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9760, 2.1006, 2.1463, 2.8070, 2.9264, 2.5174, 2.4218, 2.2102], + device='cuda:0'), covar=tensor([0.1452, 0.1665, 0.1783, 0.1431, 0.0972, 0.1464, 0.2174, 0.1944], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0317, 0.0352, 0.0294, 0.0331, 0.0315, 0.0307, 0.0363], + device='cuda:0'), out_proj_covar=tensor([6.3996e-05, 6.6600e-05, 7.5462e-05, 6.0035e-05, 6.8944e-05, 6.6899e-05, + 6.5030e-05, 7.7473e-05], device='cuda:0') +2023-04-27 05:09:27,829 INFO [finetune.py:976] (0/7) Epoch 14, batch 550, loss[loss=0.192, simple_loss=0.2712, pruned_loss=0.0564, over 4835.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2479, pruned_loss=0.05559, over 894949.52 frames. ], batch size: 49, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:09:37,440 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75025.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:09:47,390 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. limit=5.0 +2023-04-27 05:09:51,444 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.675e+02 1.910e+02 2.434e+02 4.302e+02, threshold=3.821e+02, percent-clipped=4.0 +2023-04-27 05:09:58,161 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6252, 1.7482, 0.5921, 1.3424, 1.8585, 1.5223, 1.4368, 1.4369], + device='cuda:0'), covar=tensor([0.0519, 0.0368, 0.0386, 0.0574, 0.0277, 0.0534, 0.0485, 0.0613], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 05:10:01,742 INFO [finetune.py:976] (0/7) Epoch 14, batch 600, loss[loss=0.1736, simple_loss=0.2505, pruned_loss=0.04837, over 4875.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2488, pruned_loss=0.05634, over 908472.86 frames. 
], batch size: 34, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:10:26,821 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5860, 2.2667, 2.7198, 2.9363, 3.0542, 2.3060, 1.9405, 2.3591], + device='cuda:0'), covar=tensor([0.0872, 0.0990, 0.0566, 0.0659, 0.0562, 0.0906, 0.0837, 0.0722], + device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0203, 0.0183, 0.0173, 0.0178, 0.0183, 0.0156, 0.0181], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 05:10:35,744 INFO [finetune.py:976] (0/7) Epoch 14, batch 650, loss[loss=0.179, simple_loss=0.2541, pruned_loss=0.05192, over 4924.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.25, pruned_loss=0.05638, over 917525.44 frames. ], batch size: 38, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:10:43,022 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8534, 3.8025, 2.8614, 4.4191, 3.7945, 3.8485, 1.7169, 3.7505], + device='cuda:0'), covar=tensor([0.1618, 0.1100, 0.2902, 0.1531, 0.3444, 0.1841, 0.5515, 0.2344], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0212, 0.0246, 0.0301, 0.0295, 0.0244, 0.0269, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 05:10:59,289 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.654e+02 2.108e+02 2.537e+02 4.925e+02, threshold=4.216e+02, percent-clipped=6.0 +2023-04-27 05:11:09,458 INFO [finetune.py:976] (0/7) Epoch 14, batch 700, loss[loss=0.1317, simple_loss=0.1881, pruned_loss=0.03758, over 3872.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2532, pruned_loss=0.05794, over 925206.71 frames. ], batch size: 16, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:11:34,588 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4399, 2.2120, 2.6282, 2.7664, 2.9725, 2.2856, 1.9659, 2.2544], + device='cuda:0'), covar=tensor([0.0915, 0.1016, 0.0580, 0.0653, 0.0560, 0.0856, 0.0841, 0.0731], + device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0205, 0.0184, 0.0174, 0.0180, 0.0185, 0.0157, 0.0182], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 05:11:43,428 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-27 05:11:46,826 INFO [finetune.py:976] (0/7) Epoch 14, batch 750, loss[loss=0.1497, simple_loss=0.2267, pruned_loss=0.03634, over 4745.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2543, pruned_loss=0.05794, over 933430.34 frames. ], batch size: 26, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:12:09,244 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75228.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:12:31,821 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.600e+02 1.912e+02 2.289e+02 3.679e+02, threshold=3.824e+02, percent-clipped=0.0 +2023-04-27 05:12:35,573 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75251.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:12:41,569 INFO [finetune.py:976] (0/7) Epoch 14, batch 800, loss[loss=0.203, simple_loss=0.2582, pruned_loss=0.07389, over 4813.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2536, pruned_loss=0.05767, over 935641.95 frames. 
], batch size: 25, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:12:44,540 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75265.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:12:47,605 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=75270.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:13:00,653 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75289.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:13:07,659 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=75299.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:13:14,861 INFO [finetune.py:976] (0/7) Epoch 14, batch 850, loss[loss=0.1723, simple_loss=0.2346, pruned_loss=0.05498, over 4047.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2527, pruned_loss=0.05818, over 939043.54 frames. ], batch size: 17, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:13:20,842 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75320.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 05:13:40,173 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=75331.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 05:13:56,952 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.618e+02 2.006e+02 2.388e+02 8.621e+02, threshold=4.012e+02, percent-clipped=6.0 +2023-04-27 05:14:08,745 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-27 05:14:12,839 INFO [finetune.py:976] (0/7) Epoch 14, batch 900, loss[loss=0.1711, simple_loss=0.2454, pruned_loss=0.04835, over 4768.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2498, pruned_loss=0.05704, over 942721.91 frames. ], batch size: 27, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:14:12,949 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5186, 2.9397, 0.7686, 1.6900, 2.3809, 1.5318, 4.2863, 2.2390], + device='cuda:0'), covar=tensor([0.0647, 0.0706, 0.0965, 0.1402, 0.0546, 0.1059, 0.0249, 0.0629], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0075, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 05:14:50,279 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-27 05:14:57,513 INFO [finetune.py:976] (0/7) Epoch 14, batch 950, loss[loss=0.2304, simple_loss=0.2902, pruned_loss=0.08529, over 4913.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.249, pruned_loss=0.0573, over 947204.66 frames. ], batch size: 37, lr: 3.56e-03, grad_scale: 32.0 +2023-04-27 05:15:02,569 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6552, 1.2985, 1.3661, 1.3168, 1.8845, 1.4650, 1.2099, 1.3056], + device='cuda:0'), covar=tensor([0.1573, 0.1345, 0.2161, 0.1405, 0.0857, 0.1462, 0.1944, 0.2114], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0317, 0.0354, 0.0293, 0.0332, 0.0315, 0.0307, 0.0363], + device='cuda:0'), out_proj_covar=tensor([6.4060e-05, 6.6676e-05, 7.5968e-05, 5.9990e-05, 6.9202e-05, 6.6675e-05, + 6.4967e-05, 7.7640e-05], device='cuda:0') +2023-04-27 05:15:04,277 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. 
limit=2.0
+2023-04-27 05:15:20,049 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.489e+02 1.870e+02 2.238e+02 3.708e+02, threshold=3.739e+02, percent-clipped=0.0
+2023-04-27 05:15:30,321 INFO [finetune.py:976] (0/7) Epoch 14, batch 1000, loss[loss=0.1707, simple_loss=0.2455, pruned_loss=0.04793, over 4752.00 frames. ], tot_loss[loss=0.1841, simple_loss=0.252, pruned_loss=0.05811, over 949702.94 frames. ], batch size: 28, lr: 3.56e-03, grad_scale: 32.0
+2023-04-27 05:16:03,284 INFO [finetune.py:976] (0/7) Epoch 14, batch 1050, loss[loss=0.2142, simple_loss=0.2613, pruned_loss=0.08349, over 4932.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2539, pruned_loss=0.05761, over 952853.02 frames. ], batch size: 33, lr: 3.56e-03, grad_scale: 32.0
+2023-04-27 05:16:19,073 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9576, 1.4290, 1.7672, 1.6720, 1.7589, 1.3931, 0.7488, 1.4224],
+ device='cuda:0'), covar=tensor([0.3384, 0.3401, 0.1678, 0.2352, 0.2509, 0.2672, 0.4152, 0.2119],
+ device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0247, 0.0222, 0.0315, 0.0215, 0.0229, 0.0229, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 05:16:25,348 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.198e+02 1.730e+02 1.979e+02 2.287e+02 8.214e+02, threshold=3.958e+02, percent-clipped=1.0
+2023-04-27 05:16:36,966 INFO [finetune.py:976] (0/7) Epoch 14, batch 1100, loss[loss=0.2256, simple_loss=0.2995, pruned_loss=0.07585, over 4924.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.255, pruned_loss=0.05765, over 954674.25 frames. ], batch size: 38, lr: 3.56e-03, grad_scale: 32.0
+2023-04-27 05:16:39,528 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75565.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:16:57,395 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75584.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:17:07,631 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6444, 2.6532, 2.2485, 3.0597, 2.5900, 2.5972, 1.3778, 2.6184],
+ device='cuda:0'), covar=tensor([0.2304, 0.1899, 0.4507, 0.3502, 0.2974, 0.2289, 0.4859, 0.2921],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0213, 0.0248, 0.0303, 0.0297, 0.0246, 0.0270, 0.0269],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 05:17:31,382 INFO [finetune.py:976] (0/7) Epoch 14, batch 1150, loss[loss=0.1616, simple_loss=0.2469, pruned_loss=0.03818, over 4896.00 frames. ], tot_loss[loss=0.1864, simple_loss=0.2562, pruned_loss=0.05829, over 953908.59 frames. ], batch size: 36, lr: 3.56e-03, grad_scale: 32.0
+2023-04-27 05:17:32,668 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=75613.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:17:36,933 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75620.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:17:40,639 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=75626.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 05:17:42,595 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0
+2023-04-27 05:17:53,272 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.637e+02 1.916e+02 2.260e+02 4.790e+02, threshold=3.833e+02, percent-clipped=2.0
+2023-04-27 05:18:04,438 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1796, 2.6755, 0.9416, 1.4016, 2.0727, 1.2281, 3.5475, 1.6723],
+ device='cuda:0'), covar=tensor([0.0638, 0.0742, 0.0914, 0.1237, 0.0528, 0.0997, 0.0220, 0.0646],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0066, 0.0048, 0.0046, 0.0050, 0.0052, 0.0075, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 05:18:05,378 INFO [finetune.py:976] (0/7) Epoch 14, batch 1200, loss[loss=0.2, simple_loss=0.2495, pruned_loss=0.07524, over 4936.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2538, pruned_loss=0.0573, over 954577.95 frames. ], batch size: 38, lr: 3.56e-03, grad_scale: 32.0
+2023-04-27 05:18:09,671 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=75668.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:18:21,865 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-27 05:18:25,244 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8223, 3.8698, 2.8841, 4.4914, 3.9466, 3.8402, 1.7211, 3.8534],
+ device='cuda:0'), covar=tensor([0.1632, 0.1093, 0.3056, 0.1451, 0.2438, 0.1841, 0.5725, 0.2186],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0213, 0.0248, 0.0300, 0.0296, 0.0246, 0.0269, 0.0269],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 05:18:28,162 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7100, 1.2708, 1.3572, 1.6242, 1.9370, 1.6810, 1.5015, 1.2699],
+ device='cuda:0'), covar=tensor([0.1268, 0.1381, 0.1470, 0.1344, 0.0690, 0.1395, 0.1656, 0.1573],
+ device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0314, 0.0352, 0.0290, 0.0329, 0.0312, 0.0303, 0.0360],
+ device='cuda:0'), out_proj_covar=tensor([6.3446e-05, 6.5923e-05, 7.5536e-05, 5.9139e-05, 6.8496e-05, 6.6175e-05,
+ 6.4111e-05, 7.7034e-05], device='cuda:0')
+2023-04-27 05:18:43,840 INFO [finetune.py:976] (0/7) Epoch 14, batch 1250, loss[loss=0.1808, simple_loss=0.2439, pruned_loss=0.05885, over 4274.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2508, pruned_loss=0.05631, over 954536.31 frames. ], batch size: 65, lr: 3.56e-03, grad_scale: 32.0
+2023-04-27 05:19:24,353 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.269e+02 1.670e+02 2.009e+02 2.395e+02 6.270e+02, threshold=4.018e+02, percent-clipped=3.0
+2023-04-27 05:19:45,790 INFO [finetune.py:976] (0/7) Epoch 14, batch 1300, loss[loss=0.1642, simple_loss=0.2339, pruned_loss=0.04727, over 4826.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2489, pruned_loss=0.0559, over 955411.56 frames. ], batch size: 33, lr: 3.56e-03, grad_scale: 32.0
+2023-04-27 05:20:24,973 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3490, 1.8505, 2.1326, 2.5513, 2.6230, 2.1234, 1.6864, 2.2096],
+ device='cuda:0'), covar=tensor([0.0792, 0.1162, 0.0686, 0.0630, 0.0614, 0.0831, 0.0803, 0.0582],
+ device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0205, 0.0184, 0.0174, 0.0180, 0.0184, 0.0157, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:20:36,065 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4926, 3.2487, 1.0450, 2.0324, 2.0370, 2.5084, 2.0493, 1.2121],
+ device='cuda:0'), covar=tensor([0.1244, 0.0793, 0.1722, 0.1045, 0.0913, 0.0869, 0.1233, 0.1958],
+ device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0248, 0.0140, 0.0122, 0.0134, 0.0154, 0.0119, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 05:20:50,514 INFO [finetune.py:976] (0/7) Epoch 14, batch 1350, loss[loss=0.2088, simple_loss=0.2763, pruned_loss=0.07068, over 4834.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2488, pruned_loss=0.05633, over 953733.91 frames. ], batch size: 47, lr: 3.56e-03, grad_scale: 32.0
+2023-04-27 05:21:39,791 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.563e+02 1.910e+02 2.423e+02 5.618e+02, threshold=3.821e+02, percent-clipped=3.0
+2023-04-27 05:21:55,184 INFO [finetune.py:976] (0/7) Epoch 14, batch 1400, loss[loss=0.1762, simple_loss=0.2621, pruned_loss=0.04513, over 4803.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2537, pruned_loss=0.05785, over 954768.07 frames. ], batch size: 41, lr: 3.56e-03, grad_scale: 32.0
+2023-04-27 05:22:34,323 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75884.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:22:40,538 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6428, 1.3237, 1.8499, 2.1752, 1.7997, 1.6784, 1.7770, 1.7369],
+ device='cuda:0'), covar=tensor([0.5221, 0.7246, 0.6715, 0.6369, 0.6096, 0.8425, 0.8285, 0.8875],
+ device='cuda:0'), in_proj_covar=tensor([0.0416, 0.0410, 0.0497, 0.0515, 0.0446, 0.0466, 0.0474, 0.0477],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:22:51,835 INFO [finetune.py:976] (0/7) Epoch 14, batch 1450, loss[loss=0.1493, simple_loss=0.2259, pruned_loss=0.03633, over 4752.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2549, pruned_loss=0.05745, over 954921.33 frames. ], batch size: 59, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:23:03,104 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=75926.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 05:23:06,747 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=75932.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:23:14,581 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.664e+02 2.009e+02 2.321e+02 4.322e+02, threshold=4.018e+02, percent-clipped=2.0
+2023-04-27 05:23:25,304 INFO [finetune.py:976] (0/7) Epoch 14, batch 1500, loss[loss=0.1963, simple_loss=0.2776, pruned_loss=0.05752, over 4911.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2561, pruned_loss=0.05777, over 955650.01 frames. ], batch size: 33, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:23:34,212 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=75974.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:24:01,460 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5502, 2.0286, 5.3904, 4.7913, 4.7318, 5.0790, 4.5088, 4.6476],
+ device='cuda:0'), covar=tensor([0.6704, 0.6842, 0.0936, 0.2244, 0.1413, 0.2246, 0.1552, 0.2005],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0303, 0.0398, 0.0399, 0.0342, 0.0400, 0.0309, 0.0361],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:24:02,738 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-76000.pt
+2023-04-27 05:24:11,483 INFO [finetune.py:976] (0/7) Epoch 14, batch 1550, loss[loss=0.1665, simple_loss=0.2477, pruned_loss=0.04261, over 4887.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2559, pruned_loss=0.05754, over 956971.85 frames. ], batch size: 32, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:24:40,106 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.063e+02 1.630e+02 1.920e+02 2.278e+02 6.745e+02, threshold=3.839e+02, percent-clipped=3.0
+2023-04-27 05:24:54,209 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6097, 1.7797, 1.8047, 2.4155, 2.5969, 2.1287, 2.0716, 1.8310],
+ device='cuda:0'), covar=tensor([0.1649, 0.1990, 0.2666, 0.1445, 0.1222, 0.1954, 0.2276, 0.2411],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0315, 0.0351, 0.0291, 0.0329, 0.0313, 0.0304, 0.0361],
+ device='cuda:0'), out_proj_covar=tensor([6.3783e-05, 6.6040e-05, 7.5302e-05, 5.9528e-05, 6.8445e-05, 6.6280e-05,
+ 6.4310e-05, 7.7250e-05], device='cuda:0')
+2023-04-27 05:25:01,381 INFO [finetune.py:976] (0/7) Epoch 14, batch 1600, loss[loss=0.1637, simple_loss=0.2329, pruned_loss=0.04722, over 4915.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2533, pruned_loss=0.05697, over 957829.38 frames. ], batch size: 46, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:25:34,943 INFO [finetune.py:976] (0/7) Epoch 14, batch 1650, loss[loss=0.1533, simple_loss=0.2359, pruned_loss=0.03533, over 4791.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.25, pruned_loss=0.05566, over 958702.95 frames. ], batch size: 29, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:25:58,463 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.559e+02 1.837e+02 2.256e+02 6.477e+02, threshold=3.674e+02, percent-clipped=1.0
+2023-04-27 05:26:08,248 INFO [finetune.py:976] (0/7) Epoch 14, batch 1700, loss[loss=0.1783, simple_loss=0.2506, pruned_loss=0.05302, over 4874.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2476, pruned_loss=0.05513, over 957748.96 frames. ], batch size: 34, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:26:42,129 INFO [finetune.py:976] (0/7) Epoch 14, batch 1750, loss[loss=0.2072, simple_loss=0.2649, pruned_loss=0.07476, over 4760.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2496, pruned_loss=0.05587, over 956789.59 frames. ], batch size: 27, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:27:06,457 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.610e+02 1.943e+02 2.415e+02 4.122e+02, threshold=3.886e+02, percent-clipped=3.0
+2023-04-27 05:27:08,526 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.02 vs. limit=5.0
+2023-04-27 05:27:14,105 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.24 vs. limit=5.0
+2023-04-27 05:27:16,250 INFO [finetune.py:976] (0/7) Epoch 14, batch 1800, loss[loss=0.1483, simple_loss=0.2218, pruned_loss=0.03738, over 4751.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2524, pruned_loss=0.05614, over 956584.86 frames. ], batch size: 26, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:27:27,558 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76269.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 05:27:53,336 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.25 vs. limit=5.0
+2023-04-27 05:28:02,595 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76294.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:28:12,928 INFO [finetune.py:976] (0/7) Epoch 14, batch 1850, loss[loss=0.1595, simple_loss=0.2174, pruned_loss=0.05081, over 4716.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2552, pruned_loss=0.05711, over 957858.25 frames. ], batch size: 23, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:28:20,380 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.82 vs. limit=2.0
+2023-04-27 05:28:20,915 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76323.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 05:28:23,433 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0
+2023-04-27 05:28:25,680 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76330.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 05:28:36,248 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.653e+02 2.056e+02 2.488e+02 6.955e+02, threshold=4.112e+02, percent-clipped=5.0
+2023-04-27 05:28:41,490 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9452, 3.7813, 2.8157, 4.4371, 3.8659, 3.8517, 1.8060, 3.7860],
+ device='cuda:0'), covar=tensor([0.1677, 0.1107, 0.2887, 0.1452, 0.3615, 0.1671, 0.5546, 0.2361],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0214, 0.0249, 0.0303, 0.0298, 0.0247, 0.0272, 0.0272],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 05:28:43,346 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76355.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:28:46,905 INFO [finetune.py:976] (0/7) Epoch 14, batch 1900, loss[loss=0.1513, simple_loss=0.2104, pruned_loss=0.04609, over 4372.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2558, pruned_loss=0.05716, over 957314.48 frames. ], batch size: 19, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:28:56,637 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1740, 2.3231, 1.9787, 1.9670, 2.3933, 1.9263, 2.9665, 1.6933],
+ device='cuda:0'), covar=tensor([0.4196, 0.1748, 0.4833, 0.3662, 0.1836, 0.2879, 0.1352, 0.4683],
+ device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0348, 0.0428, 0.0359, 0.0382, 0.0384, 0.0374, 0.0421],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:28:57,806 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3170, 1.1925, 3.7930, 3.5451, 3.3439, 3.6198, 3.5914, 3.3692],
+ device='cuda:0'), covar=tensor([0.6654, 0.5755, 0.1054, 0.1712, 0.1146, 0.1676, 0.1868, 0.1514],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0305, 0.0399, 0.0401, 0.0345, 0.0401, 0.0312, 0.0363],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:29:01,971 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76384.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 05:29:20,623 INFO [finetune.py:976] (0/7) Epoch 14, batch 1950, loss[loss=0.154, simple_loss=0.2274, pruned_loss=0.04024, over 4823.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2552, pruned_loss=0.0569, over 957014.19 frames. ], batch size: 39, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:29:42,666 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.125e+01 1.567e+02 1.836e+02 2.067e+02 4.304e+02, threshold=3.671e+02, percent-clipped=1.0
+2023-04-27 05:29:55,871 INFO [finetune.py:976] (0/7) Epoch 14, batch 2000, loss[loss=0.1792, simple_loss=0.2408, pruned_loss=0.05875, over 4817.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2517, pruned_loss=0.0565, over 956498.84 frames. ], batch size: 39, lr: 3.55e-03, grad_scale: 32.0
+2023-04-27 05:30:58,157 INFO [finetune.py:976] (0/7) Epoch 14, batch 2050, loss[loss=0.1623, simple_loss=0.2293, pruned_loss=0.04764, over 4765.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2481, pruned_loss=0.05584, over 954625.70 frames. ], batch size: 26, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:31:19,794 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.114e+02 1.549e+02 1.874e+02 2.355e+02 5.627e+02, threshold=3.748e+02, percent-clipped=2.0
+2023-04-27 05:31:24,276 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0
+2023-04-27 05:31:32,054 INFO [finetune.py:976] (0/7) Epoch 14, batch 2100, loss[loss=0.1724, simple_loss=0.246, pruned_loss=0.04941, over 4916.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2475, pruned_loss=0.05581, over 955419.92 frames. ], batch size: 36, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:32:06,102 INFO [finetune.py:976] (0/7) Epoch 14, batch 2150, loss[loss=0.1591, simple_loss=0.2412, pruned_loss=0.03852, over 4801.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2513, pruned_loss=0.05732, over 953442.02 frames. ], batch size: 25, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:32:14,732 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76625.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 05:32:27,725 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.703e+02 1.997e+02 2.488e+02 3.640e+02, threshold=3.993e+02, percent-clipped=1.0
+2023-04-27 05:32:30,843 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76650.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:32:44,022 INFO [finetune.py:976] (0/7) Epoch 14, batch 2200, loss[loss=0.1899, simple_loss=0.2433, pruned_loss=0.06829, over 4743.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2532, pruned_loss=0.05777, over 954593.56 frames. ], batch size: 23, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:33:06,756 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=76679.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 05:33:08,598 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4535, 3.0615, 2.5493, 2.9919, 2.2330, 2.6600, 2.6543, 1.9160],
+ device='cuda:0'), covar=tensor([0.2120, 0.1293, 0.0838, 0.1165, 0.3198, 0.1211, 0.2118, 0.3057],
+ device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0313, 0.0227, 0.0287, 0.0314, 0.0268, 0.0257, 0.0273],
+ device='cuda:0'), out_proj_covar=tensor([1.1851e-04, 1.2481e-04, 9.0739e-05, 1.1438e-04, 1.2814e-04, 1.0706e-04,
+ 1.0436e-04, 1.0903e-04], device='cuda:0')
+2023-04-27 05:33:39,428 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0
+2023-04-27 05:33:46,580 INFO [finetune.py:976] (0/7) Epoch 14, batch 2250, loss[loss=0.1675, simple_loss=0.2252, pruned_loss=0.05489, over 4717.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2535, pruned_loss=0.0578, over 953178.52 frames. ], batch size: 23, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:34:30,286 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.017e+02 1.594e+02 1.898e+02 2.176e+02 4.652e+02, threshold=3.795e+02, percent-clipped=1.0
+2023-04-27 05:34:47,191 INFO [finetune.py:976] (0/7) Epoch 14, batch 2300, loss[loss=0.1624, simple_loss=0.2312, pruned_loss=0.04681, over 4898.00 frames. ], tot_loss[loss=0.185, simple_loss=0.2543, pruned_loss=0.05784, over 951462.81 frames. ], batch size: 36, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:34:52,204 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-04-27 05:35:26,138 INFO [finetune.py:976] (0/7) Epoch 14, batch 2350, loss[loss=0.1835, simple_loss=0.2566, pruned_loss=0.05519, over 4737.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2514, pruned_loss=0.0565, over 952396.27 frames. ], batch size: 54, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:36:05,985 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=76839.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:36:09,507 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.656e+02 1.888e+02 2.270e+02 4.545e+02, threshold=3.776e+02, percent-clipped=2.0
+2023-04-27 05:36:13,171 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7901, 2.1156, 1.2181, 1.4847, 2.1612, 1.6744, 1.5986, 1.6360],
+ device='cuda:0'), covar=tensor([0.0500, 0.0338, 0.0285, 0.0542, 0.0229, 0.0473, 0.0460, 0.0555],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0030, 0.0020, 0.0029, 0.0028, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 05:36:15,520 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4498, 1.0164, 1.2851, 1.1417, 1.6114, 1.3309, 1.0683, 1.1996],
+ device='cuda:0'), covar=tensor([0.1409, 0.1337, 0.1643, 0.1318, 0.0756, 0.1513, 0.1759, 0.1762],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0314, 0.0351, 0.0289, 0.0328, 0.0314, 0.0302, 0.0360],
+ device='cuda:0'), out_proj_covar=tensor([6.3514e-05, 6.6043e-05, 7.5293e-05, 5.9092e-05, 6.8257e-05, 6.6573e-05,
+ 6.4015e-05, 7.6938e-05], device='cuda:0')
+2023-04-27 05:36:20,258 INFO [finetune.py:976] (0/7) Epoch 14, batch 2400, loss[loss=0.1808, simple_loss=0.2453, pruned_loss=0.05812, over 4831.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2486, pruned_loss=0.05537, over 954283.32 frames. ], batch size: 40, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:36:30,206 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5496, 1.5228, 1.8434, 1.8112, 1.4618, 1.2488, 1.5766, 1.0510],
+ device='cuda:0'), covar=tensor([0.0640, 0.0621, 0.0456, 0.0691, 0.0804, 0.0986, 0.0705, 0.0713],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0070, 0.0069, 0.0068, 0.0074, 0.0095, 0.0075, 0.0069],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 05:36:32,610 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-04-27 05:36:46,510 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=76900.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:36:54,077 INFO [finetune.py:976] (0/7) Epoch 14, batch 2450, loss[loss=0.163, simple_loss=0.2316, pruned_loss=0.04723, over 4822.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2477, pruned_loss=0.05573, over 954542.11 frames. ], batch size: 39, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:36:56,017 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0233, 2.5474, 2.0092, 2.0381, 1.4959, 1.4558, 2.0474, 1.3804],
+ device='cuda:0'), covar=tensor([0.1675, 0.1583, 0.1465, 0.1825, 0.2331, 0.2105, 0.1097, 0.2126],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0213, 0.0168, 0.0205, 0.0201, 0.0184, 0.0157, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 05:37:04,117 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76925.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 05:37:17,025 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.514e+02 1.972e+02 2.483e+02 3.800e+02, threshold=3.944e+02, percent-clipped=1.0
+2023-04-27 05:37:20,142 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76950.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:37:27,669 INFO [finetune.py:976] (0/7) Epoch 14, batch 2500, loss[loss=0.2264, simple_loss=0.2961, pruned_loss=0.07838, over 4858.00 frames. ], tot_loss[loss=0.182, simple_loss=0.25, pruned_loss=0.05698, over 955192.25 frames. ], batch size: 44, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:37:36,005 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=76973.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 05:37:40,692 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=76979.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 05:37:52,649 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=76998.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:38:07,210 INFO [finetune.py:976] (0/7) Epoch 14, batch 2550, loss[loss=0.2031, simple_loss=0.2721, pruned_loss=0.06704, over 4806.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2533, pruned_loss=0.0573, over 954446.48 frames. ], batch size: 41, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:38:18,428 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=77027.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 05:38:30,331 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.907e+01 1.587e+02 1.892e+02 2.346e+02 6.910e+02, threshold=3.784e+02, percent-clipped=5.0
+2023-04-27 05:38:31,684 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6164, 3.2175, 2.7154, 2.9956, 2.3070, 2.7580, 2.7558, 2.2351],
+ device='cuda:0'), covar=tensor([0.1952, 0.1125, 0.0803, 0.1225, 0.2701, 0.1076, 0.1967, 0.2388],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0310, 0.0225, 0.0283, 0.0312, 0.0264, 0.0254, 0.0270],
+ device='cuda:0'), out_proj_covar=tensor([1.1712e-04, 1.2333e-04, 8.9707e-05, 1.1298e-04, 1.2704e-04, 1.0572e-04,
+ 1.0295e-04, 1.0784e-04], device='cuda:0')
+2023-04-27 05:38:34,642 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5763, 1.2584, 4.1409, 3.5809, 3.7115, 3.9018, 3.6893, 3.3821],
+ device='cuda:0'), covar=tensor([0.9514, 0.8593, 0.1755, 0.3314, 0.2142, 0.3137, 0.2613, 0.3427],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0306, 0.0400, 0.0403, 0.0344, 0.0402, 0.0313, 0.0363],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:38:40,002 INFO [finetune.py:976] (0/7) Epoch 14, batch 2600, loss[loss=0.2031, simple_loss=0.2702, pruned_loss=0.06801, over 4816.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2543, pruned_loss=0.05762, over 955153.66 frames. ], batch size: 25, lr: 3.55e-03, grad_scale: 64.0
+2023-04-27 05:39:20,515 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1848, 2.9937, 0.8550, 1.6986, 1.6245, 2.1276, 1.7464, 0.9266],
+ device='cuda:0'), covar=tensor([0.1522, 0.1030, 0.1992, 0.1222, 0.1125, 0.0986, 0.1500, 0.2151],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0245, 0.0138, 0.0120, 0.0131, 0.0153, 0.0118, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 05:39:20,545 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77095.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:39:41,102 INFO [finetune.py:976] (0/7) Epoch 14, batch 2650, loss[loss=0.1869, simple_loss=0.2565, pruned_loss=0.0586, over 4853.00 frames. ], tot_loss[loss=0.1851, simple_loss=0.2548, pruned_loss=0.05764, over 954449.65 frames. ], batch size: 31, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:39:44,308 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5521, 1.4783, 1.7792, 1.8790, 1.4161, 1.3060, 1.5098, 1.0631],
+ device='cuda:0'), covar=tensor([0.0698, 0.0817, 0.0438, 0.0567, 0.0823, 0.1166, 0.0659, 0.0681],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0070, 0.0068, 0.0075, 0.0096, 0.0076, 0.0070],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 05:40:19,438 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.638e+02 1.948e+02 2.279e+02 4.007e+02, threshold=3.896e+02, percent-clipped=1.0
+2023-04-27 05:40:26,289 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77156.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 05:40:29,225 INFO [finetune.py:976] (0/7) Epoch 14, batch 2700, loss[loss=0.169, simple_loss=0.2328, pruned_loss=0.05264, over 4689.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2529, pruned_loss=0.05639, over 954434.53 frames. ], batch size: 23, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:40:30,512 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6823, 1.5452, 0.7520, 1.3775, 1.7391, 1.5639, 1.4372, 1.4933],
+ device='cuda:0'), covar=tensor([0.0480, 0.0355, 0.0355, 0.0519, 0.0268, 0.0473, 0.0444, 0.0536],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0025, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 05:40:42,869 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.1132, 4.0895, 3.0428, 4.7697, 4.0815, 4.1005, 1.7299, 4.1247],
+ device='cuda:0'), covar=tensor([0.1491, 0.1062, 0.3422, 0.1096, 0.3227, 0.1608, 0.5584, 0.2131],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0212, 0.0248, 0.0302, 0.0296, 0.0245, 0.0271, 0.0271],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 05:40:44,650 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4298, 1.6910, 1.7944, 1.9421, 1.8813, 1.9356, 1.9355, 1.8973],
+ device='cuda:0'), covar=tensor([0.4098, 0.5790, 0.5093, 0.4798, 0.5840, 0.8435, 0.5450, 0.5354],
+ device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0377, 0.0317, 0.0330, 0.0343, 0.0401, 0.0358, 0.0327],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 05:40:53,252 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77195.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:41:03,078 INFO [finetune.py:976] (0/7) Epoch 14, batch 2750, loss[loss=0.1448, simple_loss=0.2178, pruned_loss=0.03584, over 4703.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2502, pruned_loss=0.05582, over 953174.09 frames. ], batch size: 23, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:41:03,200 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77211.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:41:37,791 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.631e+02 1.907e+02 2.391e+02 4.996e+02, threshold=3.813e+02, percent-clipped=2.0
+2023-04-27 05:41:48,207 INFO [finetune.py:976] (0/7) Epoch 14, batch 2800, loss[loss=0.2195, simple_loss=0.266, pruned_loss=0.0865, over 4017.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2464, pruned_loss=0.05426, over 952344.50 frames. ], batch size: 17, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:41:52,773 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-04-27 05:41:55,156 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77272.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:41:55,747 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77273.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:41:58,635 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77277.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:42:22,266 INFO [finetune.py:976] (0/7) Epoch 14, batch 2850, loss[loss=0.14, simple_loss=0.2088, pruned_loss=0.03564, over 4750.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2452, pruned_loss=0.05425, over 954010.94 frames. ], batch size: 26, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:42:22,491 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.36 vs. limit=5.0
+2023-04-27 05:42:27,391 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0
+2023-04-27 05:42:37,450 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77334.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:42:39,863 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77338.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:42:44,406 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.666e+02 1.986e+02 2.404e+02 4.400e+02, threshold=3.972e+02, percent-clipped=2.0
+2023-04-27 05:42:49,348 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-04-27 05:42:55,656 INFO [finetune.py:976] (0/7) Epoch 14, batch 2900, loss[loss=0.1756, simple_loss=0.2457, pruned_loss=0.05276, over 4758.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2488, pruned_loss=0.05607, over 953101.22 frames. ], batch size: 26, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:43:29,377 INFO [finetune.py:976] (0/7) Epoch 14, batch 2950, loss[loss=0.2131, simple_loss=0.2748, pruned_loss=0.07572, over 4918.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2525, pruned_loss=0.05769, over 950425.23 frames. ], batch size: 38, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:43:50,925 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.615e+02 1.939e+02 2.289e+02 5.440e+02, threshold=3.878e+02, percent-clipped=2.0
+2023-04-27 05:43:55,610 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77451.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 05:44:03,066 INFO [finetune.py:976] (0/7) Epoch 14, batch 3000, loss[loss=0.1955, simple_loss=0.2725, pruned_loss=0.05924, over 4865.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2532, pruned_loss=0.05778, over 952108.69 frames. ], batch size: 34, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:44:03,067 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 05:44:08,020 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9229, 1.7074, 1.9070, 2.2272, 2.2796, 1.9321, 1.3802, 2.0442],
+ device='cuda:0'), covar=tensor([0.0789, 0.1255, 0.0771, 0.0537, 0.0606, 0.0764, 0.0760, 0.0508],
+ device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0204, 0.0184, 0.0174, 0.0180, 0.0184, 0.0156, 0.0182],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:44:11,484 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7609, 1.0730, 1.7219, 2.2637, 1.8993, 1.7013, 1.7365, 1.7144],
+ device='cuda:0'), covar=tensor([0.5156, 0.7447, 0.7505, 0.6905, 0.6946, 0.8877, 0.8773, 0.8620],
+ device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0406, 0.0494, 0.0510, 0.0442, 0.0464, 0.0470, 0.0473],
+ device='cuda:0'), out_proj_covar=tensor([9.9870e-05, 1.0070e-04, 1.1121e-04, 1.2118e-04, 1.0647e-04, 1.1165e-04,
+ 1.1230e-04, 1.1264e-04], device='cuda:0')
+2023-04-27 05:44:19,535 INFO [finetune.py:1010] (0/7) Epoch 14, validation: loss=0.1527, simple_loss=0.224, pruned_loss=0.04073, over 2265189.00 frames.
+2023-04-27 05:44:19,535 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 05:44:20,431 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-04-27 05:45:02,921 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77495.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:45:24,239 INFO [finetune.py:976] (0/7) Epoch 14, batch 3050, loss[loss=0.191, simple_loss=0.2757, pruned_loss=0.05311, over 4879.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2532, pruned_loss=0.05719, over 953390.75 frames. ], batch size: 32, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:45:39,067 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1506, 2.2033, 1.9350, 1.9540, 2.4683, 1.8807, 2.8572, 1.6629],
+ device='cuda:0'), covar=tensor([0.4323, 0.2083, 0.4508, 0.3348, 0.1598, 0.2883, 0.1743, 0.4693],
+ device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0349, 0.0431, 0.0357, 0.0384, 0.0386, 0.0374, 0.0423],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:45:42,117 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4708, 3.1132, 2.6884, 2.9164, 2.4022, 2.7220, 2.7515, 2.0649],
+ device='cuda:0'), covar=tensor([0.2430, 0.1628, 0.0819, 0.1400, 0.3023, 0.1308, 0.2438, 0.3005],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0309, 0.0223, 0.0282, 0.0310, 0.0263, 0.0253, 0.0269],
+ device='cuda:0'), out_proj_covar=tensor([1.1667e-04, 1.2300e-04, 8.8984e-05, 1.1244e-04, 1.2658e-04, 1.0525e-04,
+ 1.0248e-04, 1.0735e-04], device='cuda:0')
+2023-04-27 05:45:45,779 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=77543.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:45:46,941 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.289e+02 1.813e+02 2.136e+02 2.467e+02 4.229e+02, threshold=4.273e+02, percent-clipped=1.0
+2023-04-27 05:45:56,024 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8425, 2.0548, 2.0347, 2.2079, 1.9596, 2.1318, 2.1109, 2.0200],
+ device='cuda:0'), covar=tensor([0.4554, 0.7092, 0.6164, 0.5407, 0.6656, 0.8154, 0.7296, 0.6379],
+ device='cuda:0'), in_proj_covar=tensor([0.0331, 0.0376, 0.0316, 0.0330, 0.0342, 0.0400, 0.0356, 0.0326],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 05:45:57,727 INFO [finetune.py:976] (0/7) Epoch 14, batch 3100, loss[loss=0.1591, simple_loss=0.233, pruned_loss=0.04255, over 4927.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2501, pruned_loss=0.05554, over 954339.39 frames. ], batch size: 33, lr: 3.54e-03, grad_scale: 64.0
+2023-04-27 05:46:01,903 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77567.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:46:17,303 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0653, 1.4427, 1.4284, 1.7442, 1.5972, 1.8655, 1.3369, 3.1122],
+ device='cuda:0'), covar=tensor([0.0636, 0.0813, 0.0766, 0.1147, 0.0626, 0.0421, 0.0708, 0.0155],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0057],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 05:46:36,287 INFO [finetune.py:976] (0/7) Epoch 14, batch 3150, loss[loss=0.1805, simple_loss=0.2478, pruned_loss=0.05658, over 4833.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2484, pruned_loss=0.05534, over 955759.73 frames. ], batch size: 41, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:46:44,724 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77616.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:47:05,396 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77629.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:47:07,848 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77633.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:47:20,986 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.865e+01 1.595e+02 1.944e+02 2.388e+02 4.637e+02, threshold=3.889e+02, percent-clipped=1.0
+2023-04-27 05:47:29,103 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4647, 3.0008, 0.9506, 1.6772, 1.7903, 2.1436, 1.7858, 0.9071],
+ device='cuda:0'), covar=tensor([0.1460, 0.0932, 0.1873, 0.1239, 0.1080, 0.1041, 0.1593, 0.1986],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0246, 0.0139, 0.0121, 0.0132, 0.0153, 0.0119, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 05:47:41,184 INFO [finetune.py:976] (0/7) Epoch 14, batch 3200, loss[loss=0.1738, simple_loss=0.2381, pruned_loss=0.05475, over 4795.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.246, pruned_loss=0.05481, over 955917.20 frames. ], batch size: 51, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:48:05,074 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=77677.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:48:05,086 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77677.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:48:48,230 INFO [finetune.py:976] (0/7) Epoch 14, batch 3250, loss[loss=0.1983, simple_loss=0.2689, pruned_loss=0.0638, over 4929.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2488, pruned_loss=0.05661, over 958072.54 frames. ], batch size: 38, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:49:06,387 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3505, 2.2444, 2.6491, 2.7378, 2.9221, 2.2797, 1.8560, 2.3725],
+ device='cuda:0'), covar=tensor([0.0988, 0.0936, 0.0567, 0.0667, 0.0609, 0.0875, 0.0789, 0.0606],
+ device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0202, 0.0181, 0.0172, 0.0177, 0.0182, 0.0153, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:49:28,785 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=77738.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:49:33,561 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.660e+02 2.088e+02 2.486e+02 4.990e+02, threshold=4.175e+02, percent-clipped=6.0
+2023-04-27 05:49:36,698 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77751.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 05:49:42,680 INFO [finetune.py:976] (0/7) Epoch 14, batch 3300, loss[loss=0.175, simple_loss=0.2433, pruned_loss=0.05337, over 4822.00 frames. ], tot_loss[loss=0.1826, simple_loss=0.2509, pruned_loss=0.05714, over 954062.14 frames. ], batch size: 30, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:49:55,307 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2415, 1.2601, 1.3804, 1.5713, 1.6828, 1.3746, 0.9403, 1.4684],
+ device='cuda:0'), covar=tensor([0.0996, 0.1375, 0.0841, 0.0668, 0.0739, 0.0831, 0.0849, 0.0639],
+ device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0202, 0.0182, 0.0172, 0.0178, 0.0182, 0.0154, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:50:08,406 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=77799.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:50:15,837 INFO [finetune.py:976] (0/7) Epoch 14, batch 3350, loss[loss=0.2056, simple_loss=0.2641, pruned_loss=0.0735, over 4796.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2536, pruned_loss=0.0578, over 955146.75 frames. ], batch size: 45, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:50:21,265 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7494, 3.8068, 2.8454, 4.4249, 3.9300, 3.7231, 1.8171, 3.6419],
+ device='cuda:0'), covar=tensor([0.1810, 0.1373, 0.2756, 0.1626, 0.2875, 0.1939, 0.5386, 0.2526],
+ device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0215, 0.0251, 0.0305, 0.0300, 0.0248, 0.0273, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 05:50:39,908 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.645e+02 1.956e+02 2.306e+02 5.075e+02, threshold=3.912e+02, percent-clipped=1.0
+2023-04-27 05:50:49,112 INFO [finetune.py:976] (0/7) Epoch 14, batch 3400, loss[loss=0.2173, simple_loss=0.2846, pruned_loss=0.07493, over 4927.00 frames. ], tot_loss[loss=0.1869, simple_loss=0.2556, pruned_loss=0.0591, over 955703.58 frames. ], batch size: 38, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:50:52,859 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77867.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:51:05,460 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0
+2023-04-27 05:51:22,441 INFO [finetune.py:976] (0/7) Epoch 14, batch 3450, loss[loss=0.1637, simple_loss=0.2327, pruned_loss=0.04737, over 4864.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2555, pruned_loss=0.05854, over 955441.10 frames. ], batch size: 31, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:51:24,870 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=77915.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:51:32,205 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.62 vs. limit=2.0
+2023-04-27 05:51:33,836 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77929.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:51:36,793 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=77933.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:51:45,494 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.602e+02 1.891e+02 2.396e+02 5.235e+02, threshold=3.783e+02, percent-clipped=1.0
+2023-04-27 05:51:54,677 INFO [finetune.py:976] (0/7) Epoch 14, batch 3500, loss[loss=0.2156, simple_loss=0.2729, pruned_loss=0.07914, over 4213.00 frames. ], tot_loss[loss=0.1849, simple_loss=0.2537, pruned_loss=0.05805, over 955484.18 frames. ], batch size: 65, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:51:56,586 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8978, 1.9528, 1.7987, 1.5623, 2.1081, 1.6420, 2.5751, 1.6009],
+ device='cuda:0'), covar=tensor([0.4032, 0.1948, 0.5194, 0.3223, 0.1745, 0.2778, 0.1506, 0.4855],
+ device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0348, 0.0428, 0.0357, 0.0382, 0.0383, 0.0374, 0.0421],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:52:01,406 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=77972.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:52:04,448 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=77977.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:52:07,434 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=77981.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:52:09,332 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2940, 1.1532, 1.2920, 1.5260, 1.6802, 1.3277, 0.9460, 1.4758],
+ device='cuda:0'), covar=tensor([0.0788, 0.1469, 0.0900, 0.0587, 0.0593, 0.0713, 0.0799, 0.0551],
+ device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0203, 0.0183, 0.0173, 0.0179, 0.0183, 0.0154, 0.0181],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:52:21,134 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-78000.pt
+2023-04-27 05:52:28,915 INFO [finetune.py:976] (0/7) Epoch 14, batch 3550, loss[loss=0.1769, simple_loss=0.2423, pruned_loss=0.0558, over 4937.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2511, pruned_loss=0.05731, over 956743.32 frames. ], batch size: 38, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:52:31,560 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-04-27 05:52:42,974 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78033.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:52:56,888 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.685e+02 1.967e+02 2.304e+02 5.361e+02, threshold=3.934e+02, percent-clipped=3.0
+2023-04-27 05:53:05,575 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-27 05:53:17,515 INFO [finetune.py:976] (0/7) Epoch 14, batch 3600, loss[loss=0.1891, simple_loss=0.2572, pruned_loss=0.06045, over 4910.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2491, pruned_loss=0.05686, over 956696.45 frames. ], batch size: 37, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:54:15,143 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0
+2023-04-27 05:54:18,584 INFO [finetune.py:976] (0/7) Epoch 14, batch 3650, loss[loss=0.1599, simple_loss=0.2382, pruned_loss=0.04074, over 4769.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2502, pruned_loss=0.05675, over 956649.92 frames. ], batch size: 28, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:54:56,789 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.566e+02 1.842e+02 2.272e+02 4.887e+02, threshold=3.683e+02, percent-clipped=1.0
+2023-04-27 05:55:18,207 INFO [finetune.py:976] (0/7) Epoch 14, batch 3700, loss[loss=0.2356, simple_loss=0.3111, pruned_loss=0.08, over 4840.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2544, pruned_loss=0.05817, over 955798.14 frames. ], batch size: 49, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:55:44,099 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-04-27 05:55:50,534 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6620, 1.0067, 1.6593, 2.1307, 1.7640, 1.5663, 1.6289, 1.6396],
+ device='cuda:0'), covar=tensor([0.4766, 0.6733, 0.6050, 0.6179, 0.6251, 0.8134, 0.7902, 0.8324],
+ device='cuda:0'), in_proj_covar=tensor([0.0412, 0.0406, 0.0494, 0.0509, 0.0442, 0.0464, 0.0471, 0.0473],
+ device='cuda:0'), out_proj_covar=tensor([9.9698e-05, 1.0050e-04, 1.1113e-04, 1.2094e-04, 1.0667e-04, 1.1174e-04,
+ 1.1232e-04, 1.1275e-04], device='cuda:0')
+2023-04-27 05:55:56,843 INFO [finetune.py:976] (0/7) Epoch 14, batch 3750, loss[loss=0.1811, simple_loss=0.2576, pruned_loss=0.05233, over 4812.00 frames. ], tot_loss[loss=0.186, simple_loss=0.2554, pruned_loss=0.05826, over 953915.01 frames. ], batch size: 39, lr: 3.54e-03, grad_scale: 32.0
+2023-04-27 05:55:58,168 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5858, 1.3686, 4.2782, 3.9687, 3.7024, 4.0357, 3.9650, 3.7738],
+ device='cuda:0'), covar=tensor([0.6856, 0.5374, 0.0974, 0.1863, 0.1202, 0.1241, 0.1369, 0.1391],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0306, 0.0403, 0.0405, 0.0347, 0.0405, 0.0313, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 05:56:09,777 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78232.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:56:18,615 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.305e+02 1.679e+02 1.977e+02 2.535e+02 5.677e+02, threshold=3.954e+02, percent-clipped=2.0
+2023-04-27 05:56:19,444 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.91 vs. limit=5.0
+2023-04-27 05:56:30,168 INFO [finetune.py:976] (0/7) Epoch 14, batch 3800, loss[loss=0.1814, simple_loss=0.2566, pruned_loss=0.05307, over 4827.00 frames. ], tot_loss[loss=0.1868, simple_loss=0.2561, pruned_loss=0.05873, over 951234.29 frames. ], batch size: 39, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 05:56:37,433 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78272.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:56:42,300 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.8212, 4.7211, 3.1938, 5.4755, 4.7470, 4.7277, 2.2044, 4.6166],
+ device='cuda:0'), covar=tensor([0.1441, 0.0829, 0.2838, 0.0748, 0.2635, 0.1422, 0.5507, 0.2183],
+ device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0216, 0.0252, 0.0306, 0.0300, 0.0249, 0.0273, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 05:56:50,284 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78293.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 05:57:03,399 INFO [finetune.py:976] (0/7) Epoch 14, batch 3850, loss[loss=0.1828, simple_loss=0.2606, pruned_loss=0.05246, over 4824.00 frames. ], tot_loss[loss=0.1861, simple_loss=0.2557, pruned_loss=0.05827, over 952067.90 frames. ], batch size: 47, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 05:57:09,824 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=78320.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:57:17,856 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78333.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:57:25,784 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.080e+02 1.524e+02 1.884e+02 2.249e+02 3.539e+02, threshold=3.767e+02, percent-clipped=0.0
+2023-04-27 05:57:36,828 INFO [finetune.py:976] (0/7) Epoch 14, batch 3900, loss[loss=0.1788, simple_loss=0.2519, pruned_loss=0.05285, over 4827.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2523, pruned_loss=0.05719, over 953815.52 frames. ], batch size: 38, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 05:57:47,572 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-27 05:57:50,413 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=78381.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:57:55,982 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0259, 2.5016, 0.9278, 1.3095, 2.0435, 1.1640, 3.3375, 1.6235],
+ device='cuda:0'), covar=tensor([0.0680, 0.0582, 0.0757, 0.1261, 0.0456, 0.1028, 0.0230, 0.0650],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0051, 0.0052, 0.0076, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 05:58:09,812 INFO [finetune.py:976] (0/7) Epoch 14, batch 3950, loss[loss=0.1904, simple_loss=0.2578, pruned_loss=0.0615, over 4906.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2503, pruned_loss=0.05663, over 954935.24 frames. ], batch size: 37, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 05:58:50,266 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.090e+02 1.573e+02 1.912e+02 2.257e+02 5.641e+02, threshold=3.824e+02, percent-clipped=2.0
+2023-04-27 05:58:57,406 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.0789, 2.1825, 2.1844, 2.8850, 2.9796, 2.5445, 2.4782, 2.1031],
+ device='cuda:0'), covar=tensor([0.1245, 0.1518, 0.1786, 0.1333, 0.1044, 0.1531, 0.2310, 0.1826],
+ device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0315, 0.0352, 0.0291, 0.0328, 0.0313, 0.0303, 0.0360],
+ device='cuda:0'), out_proj_covar=tensor([6.3445e-05, 6.6129e-05, 7.5375e-05, 5.9373e-05, 6.8149e-05, 6.6401e-05,
+ 6.3984e-05, 7.6868e-05], device='cuda:0')
+2023-04-27 05:59:04,624 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78460.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 05:59:05,119 INFO [finetune.py:976] (0/7) Epoch 14, batch 4000, loss[loss=0.2185, simple_loss=0.281, pruned_loss=0.07799, over 4775.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2494, pruned_loss=0.05627, over 954613.28 frames. ], batch size: 54, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:00:00,328 INFO [finetune.py:976] (0/7) Epoch 14, batch 4050, loss[loss=0.2545, simple_loss=0.2976, pruned_loss=0.1057, over 4931.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2532, pruned_loss=0.05821, over 955823.55 frames. ], batch size: 38, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:00:19,640 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78521.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:00:19,664 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7752, 2.2925, 1.7546, 1.5958, 1.3072, 1.3395, 1.7895, 1.2341],
+ device='cuda:0'), covar=tensor([0.1651, 0.1402, 0.1580, 0.1792, 0.2406, 0.2013, 0.1114, 0.2171],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0213, 0.0168, 0.0204, 0.0201, 0.0184, 0.0157, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 06:00:51,058 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.238e+02 1.700e+02 2.035e+02 2.375e+02 5.106e+02, threshold=4.071e+02, percent-clipped=4.0
+2023-04-27 06:00:54,790 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0641, 2.4205, 0.9154, 1.3829, 1.9481, 1.2985, 3.3672, 1.7506],
+ device='cuda:0'), covar=tensor([0.0663, 0.0573, 0.0769, 0.1268, 0.0478, 0.1023, 0.0276, 0.0663],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0076, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 06:01:03,056 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78556.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:01:06,002 INFO [finetune.py:976] (0/7) Epoch 14, batch 4100, loss[loss=0.1719, simple_loss=0.2446, pruned_loss=0.04961, over 4902.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2539, pruned_loss=0.05787, over 955179.78 frames. ], batch size: 37, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:01:45,270 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78588.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 06:02:05,268 INFO [finetune.py:976] (0/7) Epoch 14, batch 4150, loss[loss=0.2085, simple_loss=0.2667, pruned_loss=0.07515, over 4827.00 frames. ], tot_loss[loss=0.1858, simple_loss=0.2552, pruned_loss=0.05823, over 955204.36 frames. ], batch size: 30, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:02:09,561 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=78617.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:02:09,681 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0
+2023-04-27 06:02:29,493 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.351e+02 1.709e+02 1.960e+02 2.352e+02 3.930e+02, threshold=3.920e+02, percent-clipped=0.0
+2023-04-27 06:02:33,340 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0
+2023-04-27 06:02:38,717 INFO [finetune.py:976] (0/7) Epoch 14, batch 4200, loss[loss=0.1915, simple_loss=0.2712, pruned_loss=0.05586, over 4738.00 frames. ], tot_loss[loss=0.1866, simple_loss=0.2563, pruned_loss=0.05843, over 954900.64 frames. ], batch size: 54, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:03:12,003 INFO [finetune.py:976] (0/7) Epoch 14, batch 4250, loss[loss=0.176, simple_loss=0.2347, pruned_loss=0.05861, over 4904.00 frames. ], tot_loss[loss=0.1856, simple_loss=0.2549, pruned_loss=0.05816, over 954525.56 frames. ], batch size: 36, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:03:36,178 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.058e+02 1.588e+02 1.905e+02 2.240e+02 4.270e+02, threshold=3.810e+02, percent-clipped=2.0
+2023-04-27 06:03:45,440 INFO [finetune.py:976] (0/7) Epoch 14, batch 4300, loss[loss=0.173, simple_loss=0.2315, pruned_loss=0.0573, over 4915.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2506, pruned_loss=0.05602, over 954692.46 frames. ], batch size: 43, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:04:17,613 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1867, 2.6428, 2.1881, 2.4759, 1.8240, 2.1724, 2.1296, 1.5982],
+ device='cuda:0'), covar=tensor([0.1995, 0.1181, 0.0867, 0.1231, 0.3346, 0.1114, 0.1900, 0.2727],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0311, 0.0225, 0.0283, 0.0313, 0.0265, 0.0254, 0.0270],
+ device='cuda:0'), out_proj_covar=tensor([1.1741e-04, 1.2366e-04, 8.9743e-05, 1.1293e-04, 1.2764e-04, 1.0613e-04,
+ 1.0281e-04, 1.0778e-04], device='cuda:0')
+2023-04-27 06:04:19,291 INFO [finetune.py:976] (0/7) Epoch 14, batch 4350, loss[loss=0.2027, simple_loss=0.2657, pruned_loss=0.06983, over 4850.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.248, pruned_loss=0.05574, over 954400.78 frames. ], batch size: 47, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:04:22,429 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78816.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:04:43,555 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.581e+02 1.858e+02 2.218e+02 4.471e+02, threshold=3.716e+02, percent-clipped=1.0
+2023-04-27 06:05:04,243 INFO [finetune.py:976] (0/7) Epoch 14, batch 4400, loss[loss=0.1784, simple_loss=0.2578, pruned_loss=0.04951, over 4904.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2484, pruned_loss=0.0557, over 954687.12 frames. ], batch size: 37, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:05:38,842 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=78888.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:06:11,507 INFO [finetune.py:976] (0/7) Epoch 14, batch 4450, loss[loss=0.191, simple_loss=0.2676, pruned_loss=0.05724, over 4767.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2511, pruned_loss=0.05618, over 955551.74 frames. ], batch size: 54, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:06:12,201 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=78912.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:06:22,581 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9521, 2.3986, 0.9822, 1.3450, 1.7596, 1.2028, 2.9817, 1.7188],
+ device='cuda:0'), covar=tensor([0.0711, 0.0716, 0.0819, 0.1262, 0.0505, 0.1037, 0.0327, 0.0600],
+ device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0067, 0.0049, 0.0047, 0.0050, 0.0052, 0.0076, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 06:06:44,161 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=78936.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:06:57,527 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0
+2023-04-27 06:07:03,711 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.091e+02 1.730e+02 2.046e+02 2.407e+02 5.260e+02, threshold=4.092e+02, percent-clipped=6.0
+2023-04-27 06:07:18,900 INFO [finetune.py:976] (0/7) Epoch 14, batch 4500, loss[loss=0.2022, simple_loss=0.2804, pruned_loss=0.06206, over 4817.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.2534, pruned_loss=0.0571, over 955301.09 frames. ], batch size: 47, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:07:21,398 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=78965.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:07:52,195 INFO [finetune.py:976] (0/7) Epoch 14, batch 4550, loss[loss=0.1727, simple_loss=0.2501, pruned_loss=0.04767, over 4738.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2552, pruned_loss=0.05793, over 954658.67 frames. ], batch size: 54, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:08:01,458 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79026.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:08:14,995 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.614e+01 1.580e+02 1.894e+02 2.383e+02 3.819e+02, threshold=3.787e+02, percent-clipped=0.0
+2023-04-27 06:08:26,109 INFO [finetune.py:976] (0/7) Epoch 14, batch 4600, loss[loss=0.1575, simple_loss=0.2282, pruned_loss=0.04336, over 4923.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2539, pruned_loss=0.05681, over 954812.30 frames. ], batch size: 38, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:08:27,958 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3879, 1.3860, 4.2011, 3.9388, 3.7294, 4.0360, 3.9539, 3.7326],
+ device='cuda:0'), covar=tensor([0.6913, 0.5461, 0.0853, 0.1458, 0.0882, 0.1472, 0.1263, 0.1280],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0306, 0.0402, 0.0407, 0.0347, 0.0407, 0.0312, 0.0365],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 06:08:59,227 INFO [finetune.py:976] (0/7) Epoch 14, batch 4650, loss[loss=0.1248, simple_loss=0.208, pruned_loss=0.02079, over 4781.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2516, pruned_loss=0.05668, over 953658.63 frames. ], batch size: 29, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:08:59,998 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7041, 1.2440, 1.7803, 2.2223, 1.8323, 1.6850, 1.7875, 1.7608],
+ device='cuda:0'), covar=tensor([0.5105, 0.6769, 0.6860, 0.6311, 0.6107, 0.8156, 0.8103, 0.8142],
+ device='cuda:0'), in_proj_covar=tensor([0.0415, 0.0406, 0.0495, 0.0509, 0.0444, 0.0465, 0.0472, 0.0474],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 06:09:02,379 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79116.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:09:21,595 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.186e+02 1.677e+02 1.942e+02 2.274e+02 5.469e+02, threshold=3.883e+02, percent-clipped=3.0
+2023-04-27 06:09:32,657 INFO [finetune.py:976] (0/7) Epoch 14, batch 4700, loss[loss=0.1684, simple_loss=0.2231, pruned_loss=0.05689, over 4834.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.248, pruned_loss=0.05567, over 955025.38 frames. ], batch size: 40, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:09:34,536 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=79164.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:10:05,832 INFO [finetune.py:976] (0/7) Epoch 14, batch 4750, loss[loss=0.2233, simple_loss=0.2839, pruned_loss=0.08138, over 4873.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2456, pruned_loss=0.05479, over 953808.33 frames. ], batch size: 34, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:10:06,560 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79212.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:10:13,163 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6967, 1.3545, 1.8171, 2.1670, 1.8159, 1.6324, 1.7328, 1.6787],
+ device='cuda:0'), covar=tensor([0.4710, 0.7104, 0.6985, 0.6082, 0.6200, 0.8026, 0.8488, 0.9096],
+ device='cuda:0'), in_proj_covar=tensor([0.0415, 0.0407, 0.0497, 0.0511, 0.0445, 0.0466, 0.0473, 0.0477],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 06:10:37,980 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.597e+02 1.966e+02 2.340e+02 3.997e+02, threshold=3.932e+02, percent-clipped=2.0
+2023-04-27 06:10:58,674 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=79260.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 06:10:59,216 INFO [finetune.py:976] (0/7) Epoch 14, batch 4800, loss[loss=0.1762, simple_loss=0.2454, pruned_loss=0.0535, over 4875.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2482, pruned_loss=0.05621, over 952686.59 frames. ], batch size: 31, lr: 3.53e-03, grad_scale: 32.0
+2023-04-27 06:11:25,272 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79285.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 06:11:58,657 INFO [finetune.py:976] (0/7) Epoch 14, batch 4850, loss[loss=0.1464, simple_loss=0.228, pruned_loss=0.0324, over 4895.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2517, pruned_loss=0.05699, over 953226.97 frames.
], batch size: 43, lr: 3.53e-03, grad_scale: 32.0 +2023-04-27 06:12:10,417 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79321.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:12:23,680 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79333.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:12:34,615 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.1704, 2.3843, 2.2308, 2.3301, 1.9947, 2.2843, 2.3881, 2.2966], + device='cuda:0'), covar=tensor([0.3823, 0.5977, 0.5506, 0.5156, 0.6474, 0.8026, 0.6149, 0.6114], + device='cuda:0'), in_proj_covar=tensor([0.0335, 0.0379, 0.0319, 0.0334, 0.0343, 0.0402, 0.0359, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 06:12:36,864 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.635e+02 2.018e+02 2.486e+02 3.725e+02, threshold=4.037e+02, percent-clipped=0.0 +2023-04-27 06:12:37,003 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79346.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 06:12:55,204 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9464, 2.2762, 0.9699, 1.1659, 1.6189, 1.1829, 2.8947, 1.3625], + device='cuda:0'), covar=tensor([0.0671, 0.0723, 0.0776, 0.1268, 0.0504, 0.1011, 0.0272, 0.0722], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0076, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 06:12:56,926 INFO [finetune.py:976] (0/7) Epoch 14, batch 4900, loss[loss=0.1576, simple_loss=0.2281, pruned_loss=0.04352, over 4205.00 frames. ], tot_loss[loss=0.1848, simple_loss=0.2538, pruned_loss=0.05791, over 951167.83 frames. ], batch size: 66, lr: 3.53e-03, grad_scale: 32.0 +2023-04-27 06:12:59,745 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79364.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:13:07,474 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79375.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 06:13:19,223 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79394.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:13:29,999 INFO [finetune.py:976] (0/7) Epoch 14, batch 4950, loss[loss=0.2024, simple_loss=0.2672, pruned_loss=0.06883, over 4844.00 frames. ], tot_loss[loss=0.1859, simple_loss=0.2554, pruned_loss=0.05824, over 951260.11 frames. 
], batch size: 44, lr: 3.52e-03, grad_scale: 32.0 +2023-04-27 06:13:39,244 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9069, 1.8335, 4.3751, 4.0313, 3.9144, 4.1275, 4.1337, 3.9425], + device='cuda:0'), covar=tensor([0.6259, 0.4914, 0.1278, 0.2111, 0.1189, 0.1588, 0.1007, 0.1482], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0307, 0.0403, 0.0407, 0.0349, 0.0407, 0.0313, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:13:40,482 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79425.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:13:47,622 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79436.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 06:13:53,550 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.668e+02 1.946e+02 2.385e+02 4.906e+02, threshold=3.893e+02, percent-clipped=2.0 +2023-04-27 06:14:03,221 INFO [finetune.py:976] (0/7) Epoch 14, batch 5000, loss[loss=0.1572, simple_loss=0.2272, pruned_loss=0.04358, over 4798.00 frames. ], tot_loss[loss=0.1842, simple_loss=0.2536, pruned_loss=0.05746, over 952554.50 frames. ], batch size: 25, lr: 3.52e-03, grad_scale: 32.0 +2023-04-27 06:14:13,817 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3665, 1.6254, 1.7076, 1.9845, 1.8567, 2.1863, 1.4857, 4.2700], + device='cuda:0'), covar=tensor([0.0572, 0.0756, 0.0780, 0.1192, 0.0636, 0.0494, 0.0722, 0.0107], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 06:14:35,550 INFO [finetune.py:976] (0/7) Epoch 14, batch 5050, loss[loss=0.1896, simple_loss=0.2618, pruned_loss=0.05869, over 4810.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2508, pruned_loss=0.05702, over 954236.19 frames. ], batch size: 39, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:14:59,515 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.519e+02 1.891e+02 2.332e+02 5.445e+02, threshold=3.783e+02, percent-clipped=1.0 +2023-04-27 06:15:08,088 INFO [finetune.py:976] (0/7) Epoch 14, batch 5100, loss[loss=0.2142, simple_loss=0.2609, pruned_loss=0.08376, over 4834.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2478, pruned_loss=0.05637, over 953684.06 frames. ], batch size: 33, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:15:14,003 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7544, 1.5989, 0.8141, 1.4359, 1.5829, 1.5953, 1.5151, 1.4935], + device='cuda:0'), covar=tensor([0.0514, 0.0400, 0.0399, 0.0594, 0.0304, 0.0536, 0.0518, 0.0627], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0023, 0.0029, 0.0020, 0.0028, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0038, 0.0050, 0.0037, 0.0049, 0.0048, 0.0050], + device='cuda:0') +2023-04-27 06:15:25,158 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79585.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:15:41,438 INFO [finetune.py:976] (0/7) Epoch 14, batch 5150, loss[loss=0.1667, simple_loss=0.2433, pruned_loss=0.04505, over 4894.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2468, pruned_loss=0.05575, over 953362.59 frames. 
], batch size: 35, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:15:45,781 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5357, 2.3805, 1.9385, 2.0730, 2.4400, 1.9151, 3.1298, 1.7947], + device='cuda:0'), covar=tensor([0.3883, 0.2254, 0.4892, 0.3646, 0.2073, 0.3117, 0.2163, 0.4561], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0349, 0.0432, 0.0358, 0.0385, 0.0384, 0.0376, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:15:48,490 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79621.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:16:02,433 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79641.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 06:16:06,015 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79646.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:16:06,492 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.596e+02 1.950e+02 2.248e+02 3.363e+02, threshold=3.899e+02, percent-clipped=0.0 +2023-04-27 06:16:12,611 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79657.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:16:14,973 INFO [finetune.py:976] (0/7) Epoch 14, batch 5200, loss[loss=0.1796, simple_loss=0.2556, pruned_loss=0.05179, over 4903.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2482, pruned_loss=0.056, over 952259.03 frames. ], batch size: 37, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:16:19,303 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3616, 1.4454, 1.3836, 1.6679, 1.5764, 1.8075, 1.3287, 3.1522], + device='cuda:0'), covar=tensor([0.0596, 0.0757, 0.0808, 0.1190, 0.0629, 0.0558, 0.0733, 0.0171], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 06:16:19,851 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=79669.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:16:32,070 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3021, 1.6035, 1.3582, 1.4876, 1.3615, 1.2594, 1.3361, 1.0874], + device='cuda:0'), covar=tensor([0.1534, 0.1374, 0.0944, 0.1146, 0.3396, 0.1339, 0.1676, 0.2136], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0310, 0.0224, 0.0282, 0.0312, 0.0264, 0.0254, 0.0269], + device='cuda:0'), out_proj_covar=tensor([1.1691e-04, 1.2350e-04, 8.9088e-05, 1.1244e-04, 1.2696e-04, 1.0543e-04, + 1.0267e-04, 1.0729e-04], device='cuda:0') +2023-04-27 06:16:34,495 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79689.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:16:54,348 INFO [finetune.py:976] (0/7) Epoch 14, batch 5250, loss[loss=0.2129, simple_loss=0.2824, pruned_loss=0.07171, over 4811.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2515, pruned_loss=0.05722, over 951333.67 frames. 
], batch size: 45, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:17:03,895 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79718.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:17:05,043 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79720.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:17:19,825 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79731.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 06:17:36,846 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.087e+02 1.641e+02 2.072e+02 2.610e+02 5.253e+02, threshold=4.143e+02, percent-clipped=2.0 +2023-04-27 06:17:45,876 INFO [finetune.py:976] (0/7) Epoch 14, batch 5300, loss[loss=0.1683, simple_loss=0.2496, pruned_loss=0.0435, over 4854.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2537, pruned_loss=0.05746, over 951252.69 frames. ], batch size: 31, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:17:46,572 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=79762.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:17:48,409 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4218, 2.6833, 2.1512, 2.1679, 2.4278, 2.0282, 3.5307, 1.8410], + device='cuda:0'), covar=tensor([0.4020, 0.1776, 0.4221, 0.3282, 0.2165, 0.2900, 0.1525, 0.4406], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0348, 0.0431, 0.0356, 0.0384, 0.0382, 0.0374, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:18:38,536 INFO [finetune.py:976] (0/7) Epoch 14, batch 5350, loss[loss=0.1365, simple_loss=0.2118, pruned_loss=0.03064, over 4755.00 frames. ], tot_loss[loss=0.1847, simple_loss=0.2544, pruned_loss=0.05751, over 951284.68 frames. ], batch size: 26, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:18:45,986 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=79823.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:19:02,342 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.086e+02 1.530e+02 1.860e+02 2.217e+02 4.388e+02, threshold=3.721e+02, percent-clipped=2.0 +2023-04-27 06:19:11,409 INFO [finetune.py:976] (0/7) Epoch 14, batch 5400, loss[loss=0.2006, simple_loss=0.261, pruned_loss=0.07007, over 4842.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2523, pruned_loss=0.05676, over 952067.17 frames. 
], batch size: 49, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:19:35,718 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0665, 2.5398, 2.1424, 2.4226, 1.7639, 2.0909, 2.1061, 1.6956], + device='cuda:0'), covar=tensor([0.2093, 0.1369, 0.0839, 0.1281, 0.3134, 0.1214, 0.1863, 0.2529], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0311, 0.0224, 0.0282, 0.0311, 0.0264, 0.0253, 0.0269], + device='cuda:0'), out_proj_covar=tensor([1.1668e-04, 1.2377e-04, 8.9278e-05, 1.1246e-04, 1.2691e-04, 1.0550e-04, + 1.0243e-04, 1.0727e-04], device='cuda:0') +2023-04-27 06:19:40,003 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8455, 1.3312, 1.4237, 1.6561, 2.0305, 1.6027, 1.3953, 1.3320], + device='cuda:0'), covar=tensor([0.1724, 0.1632, 0.1840, 0.1229, 0.0852, 0.2090, 0.2622, 0.2323], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0318, 0.0354, 0.0291, 0.0331, 0.0316, 0.0306, 0.0363], + device='cuda:0'), out_proj_covar=tensor([6.4378e-05, 6.6603e-05, 7.5945e-05, 5.9476e-05, 6.8895e-05, 6.6787e-05, + 6.4609e-05, 7.7527e-05], device='cuda:0') +2023-04-27 06:19:45,248 INFO [finetune.py:976] (0/7) Epoch 14, batch 5450, loss[loss=0.1646, simple_loss=0.2328, pruned_loss=0.04815, over 4912.00 frames. ], tot_loss[loss=0.18, simple_loss=0.249, pruned_loss=0.0555, over 955003.89 frames. ], batch size: 37, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:19:58,369 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-27 06:20:04,590 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=79941.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:20:04,607 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79941.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 06:20:09,136 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.493e+02 1.714e+02 2.129e+02 3.310e+02, threshold=3.428e+02, percent-clipped=0.0 +2023-04-27 06:20:18,679 INFO [finetune.py:976] (0/7) Epoch 14, batch 5500, loss[loss=0.1884, simple_loss=0.2448, pruned_loss=0.06598, over 4821.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2455, pruned_loss=0.05439, over 955132.74 frames. ], batch size: 30, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:20:36,319 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=79989.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 06:20:36,337 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=79989.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:20:44,921 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-80000.pt +2023-04-27 06:20:53,195 INFO [finetune.py:976] (0/7) Epoch 14, batch 5550, loss[loss=0.1651, simple_loss=0.2418, pruned_loss=0.04417, over 4901.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2465, pruned_loss=0.05471, over 954521.28 frames. 
], batch size: 43, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:20:54,499 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80013.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:20:59,229 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80020.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:21:06,014 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80031.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 06:21:09,685 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=80037.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:21:16,570 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.402e+01 1.777e+02 2.079e+02 2.504e+02 5.110e+02, threshold=4.158e+02, percent-clipped=3.0 +2023-04-27 06:21:24,823 INFO [finetune.py:976] (0/7) Epoch 14, batch 5600, loss[loss=0.1796, simple_loss=0.2523, pruned_loss=0.05343, over 4813.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2515, pruned_loss=0.05608, over 955780.81 frames. ], batch size: 40, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:21:28,976 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=80068.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:21:35,802 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=80079.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 06:21:45,019 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3377, 2.8135, 0.9336, 1.5908, 2.2025, 1.4408, 4.0905, 2.1436], + device='cuda:0'), covar=tensor([0.0660, 0.0762, 0.0873, 0.1266, 0.0553, 0.1019, 0.0284, 0.0590], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0076, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 06:21:50,935 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-27 06:21:50,982 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-27 06:22:01,084 INFO [finetune.py:976] (0/7) Epoch 14, batch 5650, loss[loss=0.1908, simple_loss=0.2588, pruned_loss=0.06141, over 4822.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2539, pruned_loss=0.05671, over 954831.44 frames. ], batch size: 25, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:22:11,103 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80118.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:22:17,877 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-27 06:22:28,366 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.596e+02 1.966e+02 2.394e+02 5.142e+02, threshold=3.932e+02, percent-clipped=2.0 +2023-04-27 06:22:47,951 INFO [finetune.py:976] (0/7) Epoch 14, batch 5700, loss[loss=0.1996, simple_loss=0.2455, pruned_loss=0.07687, over 4279.00 frames. ], tot_loss[loss=0.182, simple_loss=0.2509, pruned_loss=0.05656, over 938044.94 frames. ], batch size: 18, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:23:18,205 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-14.pt +2023-04-27 06:23:36,604 INFO [finetune.py:976] (0/7) Epoch 15, batch 0, loss[loss=0.2195, simple_loss=0.2915, pruned_loss=0.07373, over 4788.00 frames. 
], tot_loss[loss=0.2195, simple_loss=0.2915, pruned_loss=0.07373, over 4788.00 frames. ], batch size: 51, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:23:36,605 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 06:23:38,923 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.7424, 1.8802, 1.7946, 1.2837, 1.9735, 1.5301, 2.4081, 1.5924], + device='cuda:0'), covar=tensor([0.3796, 0.1765, 0.5045, 0.3067, 0.1421, 0.2344, 0.1476, 0.4687], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0346, 0.0427, 0.0355, 0.0381, 0.0381, 0.0372, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:23:53,230 INFO [finetune.py:1010] (0/7) Epoch 15, validation: loss=0.1536, simple_loss=0.2258, pruned_loss=0.04063, over 2265189.00 frames. +2023-04-27 06:23:53,231 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 06:23:53,377 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4119, 1.7492, 2.2636, 2.8836, 2.1976, 1.7645, 1.7572, 2.1879], + device='cuda:0'), covar=tensor([0.3314, 0.3682, 0.1690, 0.2787, 0.2898, 0.2868, 0.4145, 0.2293], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0249, 0.0225, 0.0318, 0.0216, 0.0231, 0.0231, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 06:24:48,179 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4010, 2.7928, 0.8808, 1.5536, 2.0761, 1.4830, 4.0938, 2.0441], + device='cuda:0'), covar=tensor([0.0636, 0.0896, 0.0895, 0.1270, 0.0598, 0.1017, 0.0282, 0.0602], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0076, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 06:24:56,494 INFO [finetune.py:976] (0/7) Epoch 15, batch 50, loss[loss=0.1944, simple_loss=0.2679, pruned_loss=0.06041, over 4807.00 frames. ], tot_loss[loss=0.1903, simple_loss=0.2593, pruned_loss=0.06066, over 217768.69 frames. 
], batch size: 40, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:24:59,882 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80241.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:24:59,934 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8273, 2.5758, 1.8643, 1.8565, 1.3403, 1.3375, 1.9699, 1.2991], + device='cuda:0'), covar=tensor([0.1719, 0.1335, 0.1477, 0.1691, 0.2338, 0.1930, 0.1064, 0.2097], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0212, 0.0168, 0.0203, 0.0199, 0.0183, 0.0155, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 06:25:08,720 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.528e+02 1.806e+02 2.187e+02 6.322e+02, threshold=3.611e+02, percent-clipped=3.0 +2023-04-27 06:25:08,835 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3459, 1.7250, 1.6868, 2.0398, 1.8241, 2.0101, 1.5661, 4.1665], + device='cuda:0'), covar=tensor([0.0600, 0.0780, 0.0781, 0.1131, 0.0652, 0.0520, 0.0698, 0.0125], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 06:25:29,858 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-27 06:25:36,399 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80283.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:25:40,293 INFO [finetune.py:976] (0/7) Epoch 15, batch 100, loss[loss=0.1625, simple_loss=0.2354, pruned_loss=0.04478, over 4717.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2512, pruned_loss=0.05784, over 380954.28 frames. ], batch size: 23, lr: 3.52e-03, grad_scale: 16.0 +2023-04-27 06:25:41,435 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=80289.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:25:55,283 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-27 06:25:56,996 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80313.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:26:12,986 INFO [finetune.py:976] (0/7) Epoch 15, batch 150, loss[loss=0.1313, simple_loss=0.2044, pruned_loss=0.02912, over 4818.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2465, pruned_loss=0.05668, over 507388.84 frames. 
], batch size: 25, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:26:13,103 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7123, 1.5387, 0.8207, 1.3371, 1.7587, 1.5194, 1.4174, 1.4599], + device='cuda:0'), covar=tensor([0.0518, 0.0390, 0.0349, 0.0576, 0.0272, 0.0511, 0.0500, 0.0611], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0050, 0.0038, 0.0049, 0.0048, 0.0050], + device='cuda:0') +2023-04-27 06:26:18,198 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80344.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:26:20,364 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.178e+02 1.629e+02 1.838e+02 2.299e+02 3.632e+02, threshold=3.676e+02, percent-clipped=1.0 +2023-04-27 06:26:20,493 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0002, 1.9282, 1.6333, 1.4699, 2.0934, 1.6148, 2.4101, 1.4910], + device='cuda:0'), covar=tensor([0.3761, 0.1707, 0.5332, 0.3024, 0.1510, 0.2303, 0.1481, 0.4756], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0348, 0.0429, 0.0357, 0.0383, 0.0382, 0.0373, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:26:22,570 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-27 06:26:28,916 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=80361.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:26:46,348 INFO [finetune.py:976] (0/7) Epoch 15, batch 200, loss[loss=0.1895, simple_loss=0.2624, pruned_loss=0.05834, over 4805.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2441, pruned_loss=0.0556, over 607189.33 frames. ], batch size: 51, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:26:49,942 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80393.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:27:06,643 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80418.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:27:18,650 INFO [finetune.py:976] (0/7) Epoch 15, batch 250, loss[loss=0.2619, simple_loss=0.323, pruned_loss=0.1004, over 4806.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2484, pruned_loss=0.05669, over 685833.49 frames. 
], batch size: 45, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:27:25,586 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.144e+02 1.693e+02 2.067e+02 2.411e+02 4.714e+02, threshold=4.133e+02, percent-clipped=3.0 +2023-04-27 06:27:31,413 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80454.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:27:36,876 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80463.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:27:38,598 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=80466.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:27:40,422 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6447, 1.5002, 2.0020, 2.0662, 1.5489, 1.3001, 1.6957, 1.1537], + device='cuda:0'), covar=tensor([0.0594, 0.0859, 0.0497, 0.0687, 0.0798, 0.1326, 0.0653, 0.0787], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0071, 0.0070, 0.0068, 0.0076, 0.0097, 0.0076, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 06:27:49,018 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.48 vs. limit=5.0 +2023-04-27 06:27:51,746 INFO [finetune.py:976] (0/7) Epoch 15, batch 300, loss[loss=0.1614, simple_loss=0.2412, pruned_loss=0.04079, over 4878.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2524, pruned_loss=0.05766, over 746275.46 frames. ], batch size: 32, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:27:55,992 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80494.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:28:14,202 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9572, 1.9093, 1.6690, 1.4914, 2.0720, 1.6478, 2.3878, 1.4716], + device='cuda:0'), covar=tensor([0.3570, 0.1725, 0.4792, 0.2775, 0.1499, 0.2259, 0.1338, 0.4852], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0350, 0.0432, 0.0357, 0.0385, 0.0384, 0.0375, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:28:27,734 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80524.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:28:28,332 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9288, 2.3318, 1.9274, 2.2640, 1.5853, 1.9306, 1.8700, 1.4965], + device='cuda:0'), covar=tensor([0.1676, 0.1126, 0.0705, 0.0915, 0.3204, 0.1063, 0.1826, 0.2285], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0308, 0.0222, 0.0279, 0.0310, 0.0261, 0.0252, 0.0267], + device='cuda:0'), out_proj_covar=tensor([1.1605e-04, 1.2258e-04, 8.8420e-05, 1.1089e-04, 1.2615e-04, 1.0438e-04, + 1.0201e-04, 1.0631e-04], device='cuda:0') +2023-04-27 06:28:36,206 INFO [finetune.py:976] (0/7) Epoch 15, batch 350, loss[loss=0.1975, simple_loss=0.2642, pruned_loss=0.06545, over 4806.00 frames. ], tot_loss[loss=0.1871, simple_loss=0.2562, pruned_loss=0.05903, over 794955.61 frames. 
], batch size: 39, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:28:37,549 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1498, 2.6697, 2.1542, 2.5240, 1.7765, 2.2326, 2.1481, 1.6845], + device='cuda:0'), covar=tensor([0.1776, 0.1135, 0.0761, 0.1042, 0.2966, 0.1160, 0.1882, 0.2394], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0307, 0.0221, 0.0278, 0.0309, 0.0261, 0.0252, 0.0266], + device='cuda:0'), out_proj_covar=tensor([1.1582e-04, 1.2234e-04, 8.8270e-05, 1.1076e-04, 1.2598e-04, 1.0423e-04, + 1.0192e-04, 1.0616e-04], device='cuda:0') +2023-04-27 06:28:42,200 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.655e+02 1.989e+02 2.509e+02 3.787e+02, threshold=3.978e+02, percent-clipped=0.0 +2023-04-27 06:28:48,608 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80555.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:28:55,117 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-27 06:29:15,645 INFO [finetune.py:976] (0/7) Epoch 15, batch 400, loss[loss=0.1663, simple_loss=0.24, pruned_loss=0.04633, over 4770.00 frames. ], tot_loss[loss=0.1854, simple_loss=0.2551, pruned_loss=0.05789, over 830408.01 frames. ], batch size: 26, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:30:07,686 INFO [finetune.py:976] (0/7) Epoch 15, batch 450, loss[loss=0.1768, simple_loss=0.2448, pruned_loss=0.05437, over 4898.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.2534, pruned_loss=0.05763, over 858146.94 frames. ], batch size: 36, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:30:13,710 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80639.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:30:18,556 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.255e+02 1.612e+02 1.974e+02 2.306e+02 4.694e+02, threshold=3.949e+02, percent-clipped=1.0 +2023-04-27 06:31:13,677 INFO [finetune.py:976] (0/7) Epoch 15, batch 500, loss[loss=0.1839, simple_loss=0.2434, pruned_loss=0.06218, over 4862.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2511, pruned_loss=0.0567, over 879977.23 frames. ], batch size: 31, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:31:52,664 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4288, 1.3208, 1.6998, 1.6309, 1.3503, 1.1778, 1.3153, 0.8929], + device='cuda:0'), covar=tensor([0.0563, 0.0716, 0.0439, 0.0677, 0.0715, 0.1163, 0.0628, 0.0679], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0069, 0.0068, 0.0075, 0.0096, 0.0075, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 06:31:57,447 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7838, 1.3226, 5.3164, 5.0018, 4.6393, 5.0229, 4.6684, 4.7090], + device='cuda:0'), covar=tensor([0.7008, 0.6252, 0.0834, 0.1523, 0.1036, 0.1202, 0.1141, 0.1541], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0307, 0.0401, 0.0403, 0.0349, 0.0404, 0.0312, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:32:06,679 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80736.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:32:07,804 INFO [finetune.py:976] (0/7) Epoch 15, batch 550, loss[loss=0.1346, simple_loss=0.2031, pruned_loss=0.03309, over 4723.00 frames. 
], tot_loss[loss=0.179, simple_loss=0.2474, pruned_loss=0.05537, over 896616.35 frames. ], batch size: 23, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:32:09,150 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8174, 2.3923, 1.8462, 1.7357, 1.4985, 1.4349, 1.9443, 1.4021], + device='cuda:0'), covar=tensor([0.1323, 0.1108, 0.1230, 0.1583, 0.2062, 0.1786, 0.0840, 0.1828], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0212, 0.0168, 0.0203, 0.0200, 0.0184, 0.0156, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 06:32:13,247 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.610e+02 1.836e+02 2.201e+02 4.321e+02, threshold=3.672e+02, percent-clipped=1.0 +2023-04-27 06:32:14,546 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80749.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:32:32,592 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=80773.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:32:41,567 INFO [finetune.py:976] (0/7) Epoch 15, batch 600, loss[loss=0.1741, simple_loss=0.2478, pruned_loss=0.05025, over 4932.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2492, pruned_loss=0.05602, over 910762.26 frames. ], batch size: 42, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:32:47,153 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80797.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:32:59,001 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8154, 2.1279, 1.7724, 2.0716, 1.5191, 1.8559, 1.7725, 1.4516], + device='cuda:0'), covar=tensor([0.2156, 0.1694, 0.0916, 0.1355, 0.3414, 0.1350, 0.2225, 0.2814], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0312, 0.0224, 0.0281, 0.0312, 0.0263, 0.0254, 0.0270], + device='cuda:0'), out_proj_covar=tensor([1.1722e-04, 1.2427e-04, 8.9326e-05, 1.1186e-04, 1.2717e-04, 1.0513e-04, + 1.0298e-04, 1.0766e-04], device='cuda:0') +2023-04-27 06:33:03,034 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80819.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:33:12,689 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=80834.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:33:15,001 INFO [finetune.py:976] (0/7) Epoch 15, batch 650, loss[loss=0.1908, simple_loss=0.2539, pruned_loss=0.0638, over 4147.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2524, pruned_loss=0.05731, over 920380.23 frames. 
], batch size: 65, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:33:19,960 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8264, 2.6907, 2.0986, 2.1224, 1.5009, 1.5056, 2.2226, 1.5517], + device='cuda:0'), covar=tensor([0.1378, 0.1357, 0.1180, 0.1456, 0.1886, 0.1623, 0.0754, 0.1699], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0213, 0.0169, 0.0204, 0.0201, 0.0185, 0.0157, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 06:33:20,403 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.224e+02 1.693e+02 1.997e+02 2.392e+02 3.553e+02, threshold=3.994e+02, percent-clipped=0.0 +2023-04-27 06:33:22,321 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=80850.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:33:48,173 INFO [finetune.py:976] (0/7) Epoch 15, batch 700, loss[loss=0.2145, simple_loss=0.2791, pruned_loss=0.07498, over 4772.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2543, pruned_loss=0.0581, over 927717.68 frames. ], batch size: 59, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:34:21,750 INFO [finetune.py:976] (0/7) Epoch 15, batch 750, loss[loss=0.1792, simple_loss=0.2499, pruned_loss=0.05422, over 4878.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.255, pruned_loss=0.05801, over 934640.40 frames. ], batch size: 43, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:34:22,439 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=80939.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:34:27,257 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.065e+02 1.649e+02 1.974e+02 2.385e+02 5.578e+02, threshold=3.948e+02, percent-clipped=5.0 +2023-04-27 06:34:54,970 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=80987.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:34:55,528 INFO [finetune.py:976] (0/7) Epoch 15, batch 800, loss[loss=0.2067, simple_loss=0.2728, pruned_loss=0.07027, over 4170.00 frames. ], tot_loss[loss=0.1844, simple_loss=0.2542, pruned_loss=0.05726, over 938754.98 frames. ], batch size: 65, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:35:35,143 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6694, 1.5156, 1.7031, 2.0516, 2.0643, 1.6683, 1.4087, 1.9163], + device='cuda:0'), covar=tensor([0.0805, 0.1204, 0.0766, 0.0558, 0.0562, 0.0804, 0.0768, 0.0490], + device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0205, 0.0185, 0.0174, 0.0180, 0.0185, 0.0156, 0.0182], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:35:35,654 INFO [finetune.py:976] (0/7) Epoch 15, batch 850, loss[loss=0.1835, simple_loss=0.2602, pruned_loss=0.05334, over 4815.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2521, pruned_loss=0.05634, over 943557.71 frames. ], batch size: 41, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:35:46,467 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.971e+01 1.581e+02 1.945e+02 2.309e+02 3.593e+02, threshold=3.889e+02, percent-clipped=0.0 +2023-04-27 06:35:47,779 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81049.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:36:41,753 INFO [finetune.py:976] (0/7) Epoch 15, batch 900, loss[loss=0.149, simple_loss=0.2283, pruned_loss=0.03486, over 4901.00 frames. 
], tot_loss[loss=0.1807, simple_loss=0.2498, pruned_loss=0.05579, over 946240.02 frames. ], batch size: 35, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:36:49,895 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81092.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:36:52,921 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=81097.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:37:15,229 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81116.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:37:22,537 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81119.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:37:34,999 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81129.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:37:48,249 INFO [finetune.py:976] (0/7) Epoch 15, batch 950, loss[loss=0.1714, simple_loss=0.2355, pruned_loss=0.05368, over 4745.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2485, pruned_loss=0.05581, over 946613.66 frames. ], batch size: 23, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:38:04,674 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.625e+02 2.019e+02 2.368e+02 4.092e+02, threshold=4.038e+02, percent-clipped=2.0 +2023-04-27 06:38:06,678 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81150.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:38:17,044 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=81167.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:38:23,601 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81177.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:38:32,219 INFO [finetune.py:976] (0/7) Epoch 15, batch 1000, loss[loss=0.1926, simple_loss=0.269, pruned_loss=0.05813, over 4923.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2509, pruned_loss=0.05691, over 950495.41 frames. ], batch size: 36, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:38:38,868 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=81198.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:38:39,526 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9402, 2.4637, 1.0760, 1.3486, 1.9423, 1.1501, 3.2816, 1.4918], + device='cuda:0'), covar=tensor([0.0735, 0.0676, 0.0814, 0.1252, 0.0498, 0.1067, 0.0232, 0.0732], + device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0067, 0.0049, 0.0047, 0.0050, 0.0053, 0.0077, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 06:39:02,528 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8958, 1.0672, 1.5339, 1.6535, 1.5801, 1.6952, 1.5130, 1.5508], + device='cuda:0'), covar=tensor([0.4008, 0.5372, 0.4386, 0.4429, 0.5405, 0.7410, 0.4924, 0.4582], + device='cuda:0'), in_proj_covar=tensor([0.0329, 0.0374, 0.0317, 0.0331, 0.0341, 0.0399, 0.0355, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 06:39:05,803 INFO [finetune.py:976] (0/7) Epoch 15, batch 1050, loss[loss=0.1822, simple_loss=0.2538, pruned_loss=0.05529, over 4741.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2528, pruned_loss=0.05734, over 952727.52 frames. 
], batch size: 27, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:39:11,771 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.267e+02 1.726e+02 1.946e+02 2.343e+02 3.308e+02, threshold=3.891e+02, percent-clipped=0.0 +2023-04-27 06:39:38,634 INFO [finetune.py:976] (0/7) Epoch 15, batch 1100, loss[loss=0.1649, simple_loss=0.2399, pruned_loss=0.04491, over 4914.00 frames. ], tot_loss[loss=0.1837, simple_loss=0.2538, pruned_loss=0.05675, over 954940.47 frames. ], batch size: 38, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:39:46,744 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81299.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:39:47,926 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81301.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:39:53,394 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0934, 1.7608, 2.0474, 2.4931, 2.5693, 2.0032, 1.6656, 2.0751], + device='cuda:0'), covar=tensor([0.0871, 0.1140, 0.0684, 0.0567, 0.0595, 0.0906, 0.0838, 0.0606], + device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0204, 0.0184, 0.0173, 0.0180, 0.0184, 0.0155, 0.0182], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:40:11,234 INFO [finetune.py:976] (0/7) Epoch 15, batch 1150, loss[loss=0.1709, simple_loss=0.2377, pruned_loss=0.05205, over 4777.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2531, pruned_loss=0.05615, over 954273.54 frames. ], batch size: 26, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:40:18,551 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.629e+02 1.924e+02 2.399e+02 3.865e+02, threshold=3.848e+02, percent-clipped=0.0 +2023-04-27 06:40:27,090 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81360.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:40:28,313 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81362.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:40:44,972 INFO [finetune.py:976] (0/7) Epoch 15, batch 1200, loss[loss=0.1966, simple_loss=0.2595, pruned_loss=0.06684, over 4925.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2516, pruned_loss=0.05556, over 954202.16 frames. ], batch size: 38, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:40:48,487 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81392.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:40:53,703 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1843, 1.6609, 2.0810, 2.4501, 2.0382, 1.5905, 1.2275, 1.7497], + device='cuda:0'), covar=tensor([0.3338, 0.3498, 0.1734, 0.2375, 0.2598, 0.2896, 0.4479, 0.2195], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0245, 0.0222, 0.0314, 0.0213, 0.0229, 0.0228, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 06:41:12,546 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81429.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:41:13,248 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-27 06:41:18,366 INFO [finetune.py:976] (0/7) Epoch 15, batch 1250, loss[loss=0.1741, simple_loss=0.2468, pruned_loss=0.05068, over 4742.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2509, pruned_loss=0.05619, over 953249.11 frames. 
], batch size: 23, lr: 3.51e-03, grad_scale: 16.0 +2023-04-27 06:41:20,056 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=81440.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:41:31,409 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.048e+02 1.659e+02 1.910e+02 2.320e+02 4.141e+02, threshold=3.819e+02, percent-clipped=2.0 +2023-04-27 06:42:05,037 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81472.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:42:08,082 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=81477.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:42:20,542 INFO [finetune.py:976] (0/7) Epoch 15, batch 1300, loss[loss=0.1878, simple_loss=0.2442, pruned_loss=0.06569, over 4750.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2472, pruned_loss=0.05476, over 953037.89 frames. ], batch size: 59, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:43:12,244 INFO [finetune.py:976] (0/7) Epoch 15, batch 1350, loss[loss=0.2218, simple_loss=0.2873, pruned_loss=0.07809, over 4925.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.247, pruned_loss=0.05491, over 954188.03 frames. ], batch size: 38, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:43:24,613 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.020e+02 1.644e+02 2.016e+02 2.436e+02 4.078e+02, threshold=4.033e+02, percent-clipped=1.0 +2023-04-27 06:44:07,693 INFO [finetune.py:976] (0/7) Epoch 15, batch 1400, loss[loss=0.1601, simple_loss=0.239, pruned_loss=0.04061, over 4822.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2505, pruned_loss=0.05626, over 954789.78 frames. ], batch size: 33, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:44:10,758 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7530, 2.2380, 0.9079, 1.2378, 1.4855, 1.2008, 2.4575, 1.3449], + device='cuda:0'), covar=tensor([0.0721, 0.0609, 0.0612, 0.1225, 0.0457, 0.1015, 0.0272, 0.0688], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0076, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 06:44:27,536 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0 +2023-04-27 06:44:37,169 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-27 06:44:41,267 INFO [finetune.py:976] (0/7) Epoch 15, batch 1450, loss[loss=0.1884, simple_loss=0.253, pruned_loss=0.0619, over 4821.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2517, pruned_loss=0.05602, over 955689.16 frames. 
], batch size: 39, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:44:47,199 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.626e+02 2.006e+02 2.422e+02 4.010e+02, threshold=4.013e+02, percent-clipped=0.0 +2023-04-27 06:44:53,606 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81655.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:44:54,818 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=81657.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:45:04,208 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81670.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:45:15,082 INFO [finetune.py:976] (0/7) Epoch 15, batch 1500, loss[loss=0.1747, simple_loss=0.2524, pruned_loss=0.04847, over 4898.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.254, pruned_loss=0.05691, over 955554.25 frames. ], batch size: 36, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:45:17,607 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2443, 2.9736, 2.2279, 2.3508, 1.5341, 1.5818, 2.4077, 1.5853], + device='cuda:0'), covar=tensor([0.1707, 0.1450, 0.1420, 0.1595, 0.2463, 0.2023, 0.0971, 0.2045], + device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0212, 0.0168, 0.0203, 0.0199, 0.0183, 0.0155, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 06:45:44,067 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=81731.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 06:45:48,179 INFO [finetune.py:976] (0/7) Epoch 15, batch 1550, loss[loss=0.1889, simple_loss=0.2605, pruned_loss=0.0586, over 4907.00 frames. ], tot_loss[loss=0.1838, simple_loss=0.254, pruned_loss=0.05677, over 955740.84 frames. ], batch size: 37, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:45:53,673 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.553e+02 1.872e+02 2.288e+02 6.577e+02, threshold=3.744e+02, percent-clipped=2.0 +2023-04-27 06:46:04,674 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8802, 1.2984, 1.5496, 1.5205, 2.0413, 1.6631, 1.3549, 1.4616], + device='cuda:0'), covar=tensor([0.1656, 0.1561, 0.1867, 0.1385, 0.0793, 0.1488, 0.2364, 0.2306], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0318, 0.0353, 0.0292, 0.0330, 0.0314, 0.0304, 0.0362], + device='cuda:0'), out_proj_covar=tensor([6.4331e-05, 6.6512e-05, 7.5552e-05, 5.9593e-05, 6.8614e-05, 6.6450e-05, + 6.4297e-05, 7.7448e-05], device='cuda:0') +2023-04-27 06:46:11,795 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81772.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:46:21,445 INFO [finetune.py:976] (0/7) Epoch 15, batch 1600, loss[loss=0.1992, simple_loss=0.2632, pruned_loss=0.06757, over 4759.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2518, pruned_loss=0.05649, over 954583.56 frames. 
], batch size: 27, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:46:42,451 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=81820.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:46:48,522 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8874, 1.7255, 1.9968, 2.2452, 2.2853, 1.9443, 1.4629, 2.0706], + device='cuda:0'), covar=tensor([0.0837, 0.1168, 0.0657, 0.0544, 0.0577, 0.0724, 0.0793, 0.0561], + device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0203, 0.0183, 0.0173, 0.0179, 0.0183, 0.0155, 0.0182], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:46:53,851 INFO [finetune.py:976] (0/7) Epoch 15, batch 1650, loss[loss=0.1434, simple_loss=0.2178, pruned_loss=0.03452, over 4819.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2498, pruned_loss=0.05577, over 955617.21 frames. ], batch size: 39, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:47:04,629 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.625e+02 1.858e+02 2.341e+02 4.893e+02, threshold=3.717e+02, percent-clipped=2.0 +2023-04-27 06:47:06,564 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6458, 1.8153, 0.9102, 1.3478, 1.8151, 1.4995, 1.4026, 1.4642], + device='cuda:0'), covar=tensor([0.0497, 0.0331, 0.0358, 0.0537, 0.0270, 0.0484, 0.0494, 0.0539], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 06:47:26,097 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8242, 1.4991, 2.0047, 2.2949, 1.9678, 1.8407, 1.9921, 1.8772], + device='cuda:0'), covar=tensor([0.4935, 0.6814, 0.6711, 0.6311, 0.6003, 0.9144, 0.8527, 0.8465], + device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0406, 0.0494, 0.0507, 0.0442, 0.0465, 0.0472, 0.0475], + device='cuda:0'), out_proj_covar=tensor([9.9921e-05, 1.0034e-04, 1.1119e-04, 1.2047e-04, 1.0653e-04, 1.1203e-04, + 1.1247e-04, 1.1287e-04], device='cuda:0') +2023-04-27 06:47:51,864 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7273, 3.6609, 2.7265, 4.2953, 3.6813, 3.7173, 1.5574, 3.6816], + device='cuda:0'), covar=tensor([0.1700, 0.1261, 0.3113, 0.1603, 0.3998, 0.1734, 0.5546, 0.2345], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0215, 0.0250, 0.0302, 0.0298, 0.0247, 0.0271, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 06:47:59,312 INFO [finetune.py:976] (0/7) Epoch 15, batch 1700, loss[loss=0.2257, simple_loss=0.2845, pruned_loss=0.0835, over 4242.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2471, pruned_loss=0.05479, over 955328.10 frames. ], batch size: 65, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:49:06,093 INFO [finetune.py:976] (0/7) Epoch 15, batch 1750, loss[loss=0.1721, simple_loss=0.2525, pruned_loss=0.04588, over 4940.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2492, pruned_loss=0.05505, over 955631.64 frames. 
], batch size: 38, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:49:07,438 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1911, 1.4018, 1.2920, 1.6097, 1.5221, 1.8386, 1.2576, 3.2903], + device='cuda:0'), covar=tensor([0.0635, 0.0871, 0.0849, 0.1261, 0.0684, 0.0542, 0.0807, 0.0177], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 06:49:16,877 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.080e+01 1.647e+02 2.011e+02 2.347e+02 5.729e+02, threshold=4.021e+02, percent-clipped=2.0 +2023-04-27 06:49:26,283 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81955.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:49:27,524 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=81957.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:49:34,137 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0534, 2.5246, 2.0808, 1.8395, 1.6431, 1.6520, 2.1523, 1.5813], + device='cuda:0'), covar=tensor([0.1425, 0.1295, 0.1247, 0.1595, 0.2169, 0.1764, 0.0862, 0.1865], + device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0211, 0.0167, 0.0202, 0.0199, 0.0182, 0.0154, 0.0183], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 06:49:48,428 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=81987.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:49:48,925 INFO [finetune.py:976] (0/7) Epoch 15, batch 1800, loss[loss=0.1563, simple_loss=0.2415, pruned_loss=0.03552, over 4789.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2521, pruned_loss=0.05585, over 952357.08 frames. ], batch size: 45, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:49:56,449 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-82000.pt +2023-04-27 06:49:59,483 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=82003.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:50:00,740 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=82005.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:50:16,416 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82026.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 06:50:21,886 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9162, 1.8366, 2.2777, 2.3801, 1.7592, 1.5832, 1.7505, 1.0133], + device='cuda:0'), covar=tensor([0.0582, 0.0750, 0.0467, 0.0696, 0.0895, 0.1162, 0.0900, 0.0870], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0069, 0.0068, 0.0067, 0.0074, 0.0095, 0.0074, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 06:50:24,045 INFO [finetune.py:976] (0/7) Epoch 15, batch 1850, loss[loss=0.2141, simple_loss=0.2777, pruned_loss=0.07525, over 4831.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2535, pruned_loss=0.0566, over 953240.31 frames. 
], batch size: 49, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:50:29,487 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.760e+02 1.989e+02 2.247e+02 4.902e+02, threshold=3.979e+02, percent-clipped=1.0 +2023-04-27 06:50:30,239 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82048.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:50:34,541 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82055.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:50:37,149 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-27 06:50:38,375 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 06:50:56,619 INFO [finetune.py:976] (0/7) Epoch 15, batch 1900, loss[loss=0.1695, simple_loss=0.2385, pruned_loss=0.05026, over 4875.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2531, pruned_loss=0.05574, over 954835.27 frames. ], batch size: 32, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:50:59,067 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4442, 1.3941, 1.8032, 1.7494, 1.3607, 1.2509, 1.3938, 0.9437], + device='cuda:0'), covar=tensor([0.0551, 0.0630, 0.0416, 0.0515, 0.0764, 0.1092, 0.0616, 0.0646], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0069, 0.0069, 0.0067, 0.0074, 0.0095, 0.0074, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 06:51:14,303 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82116.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:51:30,519 INFO [finetune.py:976] (0/7) Epoch 15, batch 1950, loss[loss=0.2015, simple_loss=0.2688, pruned_loss=0.06711, over 4885.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2508, pruned_loss=0.05477, over 952165.37 frames. ], batch size: 35, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:51:36,462 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.029e+02 1.535e+02 1.831e+02 2.341e+02 3.747e+02, threshold=3.661e+02, percent-clipped=0.0 +2023-04-27 06:52:04,205 INFO [finetune.py:976] (0/7) Epoch 15, batch 2000, loss[loss=0.1809, simple_loss=0.2464, pruned_loss=0.05769, over 4936.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2486, pruned_loss=0.05475, over 948435.33 frames. ], batch size: 33, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:52:11,047 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-27 06:52:22,224 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-27 06:52:25,175 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7014, 2.3316, 2.5822, 3.0740, 3.0769, 2.3981, 2.1137, 2.5652], + device='cuda:0'), covar=tensor([0.0889, 0.0979, 0.0595, 0.0498, 0.0574, 0.0868, 0.0738, 0.0567], + device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0203, 0.0183, 0.0173, 0.0178, 0.0183, 0.0155, 0.0181], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:52:37,846 INFO [finetune.py:976] (0/7) Epoch 15, batch 2050, loss[loss=0.1746, simple_loss=0.2513, pruned_loss=0.04894, over 4829.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2466, pruned_loss=0.05471, over 951436.75 frames. 
], batch size: 30, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:52:43,308 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.629e+02 1.931e+02 2.371e+02 3.947e+02, threshold=3.861e+02, percent-clipped=2.0 +2023-04-27 06:53:21,146 INFO [finetune.py:976] (0/7) Epoch 15, batch 2100, loss[loss=0.1606, simple_loss=0.2266, pruned_loss=0.04727, over 4778.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2462, pruned_loss=0.05478, over 952723.75 frames. ], batch size: 26, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:53:52,576 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5210, 1.4044, 4.3537, 4.0826, 3.8295, 4.1347, 4.0543, 3.8412], + device='cuda:0'), covar=tensor([0.7316, 0.5624, 0.0940, 0.1497, 0.1099, 0.1404, 0.1231, 0.1413], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0306, 0.0400, 0.0404, 0.0347, 0.0405, 0.0312, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:54:04,934 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82326.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:54:24,278 INFO [finetune.py:976] (0/7) Epoch 15, batch 2150, loss[loss=0.1815, simple_loss=0.2258, pruned_loss=0.0686, over 4182.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2502, pruned_loss=0.05584, over 952406.26 frames. ], batch size: 18, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:54:33,140 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82343.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:54:35,020 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82346.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:54:35,220 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-27 06:54:35,530 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.316e+02 1.732e+02 1.970e+02 2.465e+02 9.359e+02, threshold=3.940e+02, percent-clipped=1.0 +2023-04-27 06:54:58,333 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=82374.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:55:00,257 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9476, 1.7467, 2.1323, 2.4374, 2.0197, 1.8506, 2.0139, 1.9804], + device='cuda:0'), covar=tensor([0.4949, 0.7301, 0.7272, 0.6057, 0.6384, 0.8817, 0.9827, 0.9175], + device='cuda:0'), in_proj_covar=tensor([0.0415, 0.0406, 0.0496, 0.0509, 0.0444, 0.0466, 0.0474, 0.0477], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:55:13,106 INFO [finetune.py:976] (0/7) Epoch 15, batch 2200, loss[loss=0.1926, simple_loss=0.2714, pruned_loss=0.05688, over 4874.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2519, pruned_loss=0.05621, over 950612.98 frames. ], batch size: 34, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:55:42,651 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82407.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:55:45,047 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82411.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:56:07,722 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.70 vs. 
limit=2.0 +2023-04-27 06:56:12,256 INFO [finetune.py:976] (0/7) Epoch 15, batch 2250, loss[loss=0.1579, simple_loss=0.2284, pruned_loss=0.04371, over 4740.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2535, pruned_loss=0.05683, over 951216.36 frames. ], batch size: 27, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:56:19,194 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.596e+02 1.887e+02 2.124e+02 5.729e+02, threshold=3.773e+02, percent-clipped=1.0 +2023-04-27 06:56:35,984 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0675, 2.0579, 1.7402, 1.7178, 2.1113, 1.5922, 2.6408, 1.6004], + device='cuda:0'), covar=tensor([0.3725, 0.1966, 0.4841, 0.3129, 0.1926, 0.2749, 0.1472, 0.4261], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0349, 0.0430, 0.0356, 0.0383, 0.0385, 0.0376, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 06:56:45,463 INFO [finetune.py:976] (0/7) Epoch 15, batch 2300, loss[loss=0.1647, simple_loss=0.2393, pruned_loss=0.04505, over 4860.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2527, pruned_loss=0.05606, over 950926.95 frames. ], batch size: 34, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:56:49,019 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82493.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 06:57:06,841 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3125, 1.7715, 2.3287, 2.9093, 2.2374, 1.8020, 1.7179, 2.3030], + device='cuda:0'), covar=tensor([0.3655, 0.3590, 0.1744, 0.2641, 0.3115, 0.2775, 0.4453, 0.2292], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0247, 0.0223, 0.0317, 0.0215, 0.0229, 0.0230, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 06:57:18,140 INFO [finetune.py:976] (0/7) Epoch 15, batch 2350, loss[loss=0.1959, simple_loss=0.2392, pruned_loss=0.07629, over 4870.00 frames. ], tot_loss[loss=0.1822, simple_loss=0.2513, pruned_loss=0.05656, over 950615.56 frames. ], batch size: 34, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:57:25,042 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.877e+01 1.633e+02 1.887e+02 2.224e+02 3.641e+02, threshold=3.774e+02, percent-clipped=0.0 +2023-04-27 06:57:30,342 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82554.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 06:57:40,474 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82569.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:57:51,899 INFO [finetune.py:976] (0/7) Epoch 15, batch 2400, loss[loss=0.1888, simple_loss=0.2508, pruned_loss=0.06334, over 4869.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2485, pruned_loss=0.05578, over 951612.51 frames. 
], batch size: 34, lr: 3.50e-03, grad_scale: 32.0 +2023-04-27 06:58:06,079 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3239, 1.5974, 1.5184, 1.8807, 1.8077, 2.0179, 1.4989, 3.6385], + device='cuda:0'), covar=tensor([0.0650, 0.0808, 0.0872, 0.1236, 0.0656, 0.0476, 0.0751, 0.0140], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 06:58:20,992 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=82630.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:58:25,777 INFO [finetune.py:976] (0/7) Epoch 15, batch 2450, loss[loss=0.2005, simple_loss=0.2545, pruned_loss=0.07328, over 4820.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2457, pruned_loss=0.05463, over 953924.62 frames. ], batch size: 33, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 06:58:28,856 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82643.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:58:28,886 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7479, 2.5521, 1.6683, 1.7364, 1.3352, 1.3596, 1.6460, 1.2338], + device='cuda:0'), covar=tensor([0.1868, 0.1209, 0.1758, 0.1808, 0.2482, 0.2332, 0.1131, 0.2186], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0213, 0.0168, 0.0204, 0.0201, 0.0183, 0.0155, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 06:58:31,161 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.750e+01 1.533e+02 1.809e+02 2.233e+02 3.484e+02, threshold=3.618e+02, percent-clipped=0.0 +2023-04-27 06:59:04,414 INFO [finetune.py:976] (0/7) Epoch 15, batch 2500, loss[loss=0.1597, simple_loss=0.2307, pruned_loss=0.04433, over 4722.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2487, pruned_loss=0.05626, over 954030.24 frames. ], batch size: 23, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 06:59:11,863 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=82691.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:59:24,990 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82702.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 06:59:37,480 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=82711.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:00:09,107 INFO [finetune.py:976] (0/7) Epoch 15, batch 2550, loss[loss=0.1705, simple_loss=0.2381, pruned_loss=0.0515, over 4776.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2515, pruned_loss=0.0565, over 953273.95 frames. 
], batch size: 26, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:00:14,537 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.661e+02 1.980e+02 2.344e+02 4.065e+02, threshold=3.961e+02, percent-clipped=3.0 +2023-04-27 07:00:19,445 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1640, 1.6833, 2.1146, 2.4184, 2.0711, 1.6563, 1.2130, 1.7972], + device='cuda:0'), covar=tensor([0.3529, 0.3352, 0.1622, 0.2334, 0.2599, 0.2608, 0.4371, 0.2015], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0246, 0.0223, 0.0316, 0.0215, 0.0229, 0.0230, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 07:00:24,399 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=82759.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:00:47,932 INFO [finetune.py:976] (0/7) Epoch 15, batch 2600, loss[loss=0.2086, simple_loss=0.2785, pruned_loss=0.06933, over 4910.00 frames. ], tot_loss[loss=0.1836, simple_loss=0.2531, pruned_loss=0.05706, over 953426.70 frames. ], batch size: 42, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:01:43,238 INFO [finetune.py:976] (0/7) Epoch 15, batch 2650, loss[loss=0.1693, simple_loss=0.2495, pruned_loss=0.04456, over 4831.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.2551, pruned_loss=0.05801, over 952309.02 frames. ], batch size: 47, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:01:48,666 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.684e+02 1.897e+02 2.238e+02 3.554e+02, threshold=3.793e+02, percent-clipped=0.0 +2023-04-27 07:01:49,958 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82849.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 07:01:53,102 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-27 07:02:33,354 INFO [finetune.py:976] (0/7) Epoch 15, batch 2700, loss[loss=0.1955, simple_loss=0.2585, pruned_loss=0.06628, over 4870.00 frames. ], tot_loss[loss=0.1846, simple_loss=0.2543, pruned_loss=0.05752, over 952437.83 frames. ], batch size: 31, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:03:04,123 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=82925.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:03:12,571 INFO [finetune.py:976] (0/7) Epoch 15, batch 2750, loss[loss=0.1667, simple_loss=0.2333, pruned_loss=0.05009, over 4755.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2502, pruned_loss=0.0558, over 952373.43 frames. 
], batch size: 26, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:03:18,528 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.634e+02 2.005e+02 2.343e+02 4.529e+02, threshold=4.010e+02, percent-clipped=3.0 +2023-04-27 07:03:28,524 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=82963.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:03:35,965 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4182, 1.3394, 4.0622, 3.7636, 3.5958, 3.8751, 3.8497, 3.5359], + device='cuda:0'), covar=tensor([0.7281, 0.5872, 0.1083, 0.1841, 0.1143, 0.1690, 0.1579, 0.1554], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0309, 0.0405, 0.0408, 0.0349, 0.0406, 0.0315, 0.0363], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 07:03:37,842 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9437, 1.4928, 1.4765, 1.6000, 2.1154, 1.6660, 1.3776, 1.3755], + device='cuda:0'), covar=tensor([0.1343, 0.1382, 0.1859, 0.1357, 0.0891, 0.1539, 0.1960, 0.2310], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0316, 0.0353, 0.0291, 0.0329, 0.0313, 0.0304, 0.0362], + device='cuda:0'), out_proj_covar=tensor([6.3700e-05, 6.6138e-05, 7.5460e-05, 5.9422e-05, 6.8573e-05, 6.6239e-05, + 6.4359e-05, 7.7292e-05], device='cuda:0') +2023-04-27 07:03:46,114 INFO [finetune.py:976] (0/7) Epoch 15, batch 2800, loss[loss=0.163, simple_loss=0.218, pruned_loss=0.05402, over 4050.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2467, pruned_loss=0.05456, over 952427.03 frames. ], batch size: 17, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:03:55,380 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83002.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:04:09,806 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83024.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:04:19,799 INFO [finetune.py:976] (0/7) Epoch 15, batch 2850, loss[loss=0.1593, simple_loss=0.2336, pruned_loss=0.04251, over 4872.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2466, pruned_loss=0.05492, over 953836.84 frames. ], batch size: 34, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:04:25,723 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.603e+01 1.615e+02 1.794e+02 2.149e+02 4.463e+02, threshold=3.588e+02, percent-clipped=2.0 +2023-04-27 07:04:27,625 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=83050.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:05:03,759 INFO [finetune.py:976] (0/7) Epoch 15, batch 2900, loss[loss=0.1716, simple_loss=0.2495, pruned_loss=0.04689, over 4844.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2493, pruned_loss=0.0558, over 954340.33 frames. 
], batch size: 49, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:05:35,340 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1812, 1.4613, 1.3303, 1.6320, 1.5459, 1.7571, 1.3442, 3.1863], + device='cuda:0'), covar=tensor([0.0662, 0.0759, 0.0810, 0.1165, 0.0619, 0.0652, 0.0758, 0.0169], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 07:06:00,017 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.4689, 4.3729, 3.0049, 5.1259, 4.4517, 4.4236, 1.8904, 4.4205], + device='cuda:0'), covar=tensor([0.1643, 0.1094, 0.3371, 0.0948, 0.3000, 0.1502, 0.5548, 0.2189], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0215, 0.0251, 0.0303, 0.0299, 0.0247, 0.0270, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 07:06:07,042 INFO [finetune.py:976] (0/7) Epoch 15, batch 2950, loss[loss=0.1713, simple_loss=0.238, pruned_loss=0.05226, over 4323.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2534, pruned_loss=0.05676, over 955099.00 frames. ], batch size: 19, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:06:18,120 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.270e+02 1.793e+02 2.066e+02 2.627e+02 6.571e+02, threshold=4.132e+02, percent-clipped=5.0 +2023-04-27 07:06:19,456 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83149.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 07:06:42,521 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-27 07:06:53,338 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1552, 1.3087, 1.1678, 1.6076, 1.4434, 1.5142, 1.2421, 2.4383], + device='cuda:0'), covar=tensor([0.0617, 0.0853, 0.0892, 0.1310, 0.0704, 0.0528, 0.0828, 0.0247], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 07:06:56,294 INFO [finetune.py:976] (0/7) Epoch 15, batch 3000, loss[loss=0.2084, simple_loss=0.2753, pruned_loss=0.07073, over 4899.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2532, pruned_loss=0.05675, over 954215.94 frames. ], batch size: 36, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:06:56,295 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 07:07:06,876 INFO [finetune.py:1010] (0/7) Epoch 15, validation: loss=0.1516, simple_loss=0.2237, pruned_loss=0.03975, over 2265189.00 frames. 
+2023-04-27 07:07:06,877 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 07:07:12,349 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83196.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:07:12,901 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=83197.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 07:07:28,390 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1444, 2.8760, 2.0341, 2.2459, 1.5099, 1.5228, 2.3035, 1.3746], + device='cuda:0'), covar=tensor([0.1732, 0.1452, 0.1514, 0.1573, 0.2342, 0.1921, 0.0917, 0.2186], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0214, 0.0169, 0.0206, 0.0202, 0.0185, 0.0157, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 07:07:30,729 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83225.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:07:38,611 INFO [finetune.py:976] (0/7) Epoch 15, batch 3050, loss[loss=0.1969, simple_loss=0.2679, pruned_loss=0.06296, over 4907.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2529, pruned_loss=0.05642, over 953182.27 frames. ], batch size: 46, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:07:50,803 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.583e+02 1.890e+02 2.106e+02 3.614e+02, threshold=3.781e+02, percent-clipped=0.0 +2023-04-27 07:08:02,496 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83257.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:08:24,061 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=83273.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:08:36,033 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83284.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:08:42,492 INFO [finetune.py:976] (0/7) Epoch 15, batch 3100, loss[loss=0.1519, simple_loss=0.2139, pruned_loss=0.04498, over 4733.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2514, pruned_loss=0.05595, over 950387.53 frames. ], batch size: 23, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:09:13,287 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9788, 1.8238, 2.2460, 2.2999, 1.7735, 1.5593, 1.8613, 1.0993], + device='cuda:0'), covar=tensor([0.0614, 0.0822, 0.0489, 0.0794, 0.0808, 0.1154, 0.0715, 0.0847], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0069, 0.0068, 0.0075, 0.0096, 0.0075, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 07:09:15,035 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83319.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:09:26,582 INFO [finetune.py:976] (0/7) Epoch 15, batch 3150, loss[loss=0.1748, simple_loss=0.251, pruned_loss=0.04933, over 4912.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2481, pruned_loss=0.05468, over 947772.02 frames. 
], batch size: 36, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:09:31,029 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83345.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:09:32,637 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.218e+02 1.641e+02 1.865e+02 2.202e+02 3.570e+02, threshold=3.730e+02, percent-clipped=0.0 +2023-04-27 07:09:58,167 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0230, 1.5344, 1.5252, 1.6641, 2.1408, 1.6924, 1.3588, 1.4143], + device='cuda:0'), covar=tensor([0.1310, 0.1260, 0.1828, 0.1202, 0.0790, 0.1324, 0.2196, 0.1904], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0319, 0.0356, 0.0294, 0.0333, 0.0316, 0.0307, 0.0366], + device='cuda:0'), out_proj_covar=tensor([6.4246e-05, 6.6741e-05, 7.6214e-05, 6.0161e-05, 6.9363e-05, 6.6806e-05, + 6.5035e-05, 7.8200e-05], device='cuda:0') +2023-04-27 07:09:58,734 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2232, 1.4567, 4.9927, 4.7128, 4.3657, 4.7494, 4.4015, 4.4343], + device='cuda:0'), covar=tensor([0.6484, 0.6425, 0.1161, 0.1907, 0.1095, 0.1678, 0.1561, 0.1527], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0306, 0.0402, 0.0405, 0.0347, 0.0404, 0.0313, 0.0359], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 07:09:59,876 INFO [finetune.py:976] (0/7) Epoch 15, batch 3200, loss[loss=0.1793, simple_loss=0.2435, pruned_loss=0.05757, over 4815.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2463, pruned_loss=0.05496, over 948987.04 frames. ], batch size: 33, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:10:45,247 INFO [finetune.py:976] (0/7) Epoch 15, batch 3250, loss[loss=0.1754, simple_loss=0.2414, pruned_loss=0.05477, over 4891.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2467, pruned_loss=0.05521, over 951008.86 frames. ], batch size: 32, lr: 3.49e-03, grad_scale: 32.0 +2023-04-27 07:10:56,129 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.623e+02 1.932e+02 2.416e+02 4.902e+02, threshold=3.863e+02, percent-clipped=4.0 +2023-04-27 07:10:57,499 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3981, 1.3413, 1.6663, 1.6544, 1.3461, 1.1542, 1.4321, 1.0842], + device='cuda:0'), covar=tensor([0.0687, 0.0650, 0.0487, 0.0547, 0.0743, 0.0994, 0.0601, 0.0666], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0069, 0.0068, 0.0075, 0.0096, 0.0075, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 07:11:50,144 INFO [finetune.py:976] (0/7) Epoch 15, batch 3300, loss[loss=0.1796, simple_loss=0.2633, pruned_loss=0.04794, over 4804.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2501, pruned_loss=0.05625, over 951038.54 frames. ], batch size: 45, lr: 3.49e-03, grad_scale: 64.0 +2023-04-27 07:12:23,032 INFO [finetune.py:976] (0/7) Epoch 15, batch 3350, loss[loss=0.1785, simple_loss=0.2558, pruned_loss=0.05064, over 4860.00 frames. ], tot_loss[loss=0.1829, simple_loss=0.2524, pruned_loss=0.05673, over 949732.93 frames. 
], batch size: 34, lr: 3.49e-03, grad_scale: 64.0 +2023-04-27 07:12:26,765 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7215, 2.5426, 1.9750, 2.2141, 2.3623, 1.9763, 2.8579, 1.6184], + device='cuda:0'), covar=tensor([0.3527, 0.1467, 0.3687, 0.2842, 0.1683, 0.2638, 0.2059, 0.4286], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0347, 0.0427, 0.0357, 0.0384, 0.0384, 0.0372, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 07:12:28,444 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.803e+02 2.175e+02 2.734e+02 4.804e+02, threshold=4.350e+02, percent-clipped=5.0 +2023-04-27 07:12:31,569 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83552.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:12:56,720 INFO [finetune.py:976] (0/7) Epoch 15, batch 3400, loss[loss=0.1886, simple_loss=0.2574, pruned_loss=0.05994, over 4883.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2527, pruned_loss=0.05708, over 949047.27 frames. ], batch size: 32, lr: 3.49e-03, grad_scale: 64.0 +2023-04-27 07:13:09,221 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.12 vs. limit=5.0 +2023-04-27 07:13:17,698 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83619.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:13:26,778 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7830, 2.2570, 1.7515, 1.5453, 1.3353, 1.3334, 1.7636, 1.2806], + device='cuda:0'), covar=tensor([0.1757, 0.1338, 0.1554, 0.1774, 0.2583, 0.2032, 0.1174, 0.2229], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0215, 0.0169, 0.0205, 0.0201, 0.0185, 0.0157, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 07:13:30,155 INFO [finetune.py:976] (0/7) Epoch 15, batch 3450, loss[loss=0.1584, simple_loss=0.2345, pruned_loss=0.04111, over 4736.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2519, pruned_loss=0.05631, over 950291.21 frames. ], batch size: 54, lr: 3.49e-03, grad_scale: 64.0 +2023-04-27 07:13:31,411 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=83640.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:13:34,463 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2231, 1.5768, 1.4220, 1.4516, 1.3767, 1.2495, 1.3038, 1.0786], + device='cuda:0'), covar=tensor([0.1718, 0.1241, 0.0906, 0.1236, 0.3352, 0.1307, 0.1718, 0.2237], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0311, 0.0223, 0.0285, 0.0315, 0.0267, 0.0254, 0.0270], + device='cuda:0'), out_proj_covar=tensor([1.1712e-04, 1.2357e-04, 8.8762e-05, 1.1327e-04, 1.2817e-04, 1.0661e-04, + 1.0280e-04, 1.0769e-04], device='cuda:0') +2023-04-27 07:13:35,582 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.561e+02 1.911e+02 2.345e+02 4.135e+02, threshold=3.821e+02, percent-clipped=0.0 +2023-04-27 07:13:49,255 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=83667.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:14:13,519 INFO [finetune.py:976] (0/7) Epoch 15, batch 3500, loss[loss=0.1544, simple_loss=0.2216, pruned_loss=0.04362, over 4894.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2496, pruned_loss=0.05567, over 951108.86 frames. 
], batch size: 32, lr: 3.49e-03, grad_scale: 64.0 +2023-04-27 07:15:15,482 INFO [finetune.py:976] (0/7) Epoch 15, batch 3550, loss[loss=0.1861, simple_loss=0.2494, pruned_loss=0.06136, over 4916.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.247, pruned_loss=0.05509, over 952393.14 frames. ], batch size: 37, lr: 3.49e-03, grad_scale: 64.0 +2023-04-27 07:15:21,398 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.464e+02 1.732e+02 2.080e+02 4.644e+02, threshold=3.463e+02, percent-clipped=1.0 +2023-04-27 07:15:29,352 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83760.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:15:49,274 INFO [finetune.py:976] (0/7) Epoch 15, batch 3600, loss[loss=0.1334, simple_loss=0.2011, pruned_loss=0.03282, over 4805.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2449, pruned_loss=0.05481, over 953253.74 frames. ], batch size: 25, lr: 3.49e-03, grad_scale: 64.0 +2023-04-27 07:16:26,206 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=83821.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:16:55,916 INFO [finetune.py:976] (0/7) Epoch 15, batch 3650, loss[loss=0.2247, simple_loss=0.2963, pruned_loss=0.07659, over 4748.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2468, pruned_loss=0.05536, over 953644.24 frames. ], batch size: 54, lr: 3.48e-03, grad_scale: 64.0 +2023-04-27 07:17:01,428 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.661e+01 1.633e+02 1.914e+02 2.329e+02 4.921e+02, threshold=3.827e+02, percent-clipped=4.0 +2023-04-27 07:17:04,988 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83852.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:17:05,654 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4836, 2.4092, 2.0338, 2.0760, 2.5525, 2.1505, 3.3384, 1.7709], + device='cuda:0'), covar=tensor([0.4322, 0.2300, 0.5083, 0.3841, 0.1977, 0.2721, 0.1988, 0.4545], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0344, 0.0424, 0.0355, 0.0381, 0.0380, 0.0371, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 07:17:29,863 INFO [finetune.py:976] (0/7) Epoch 15, batch 3700, loss[loss=0.1871, simple_loss=0.2455, pruned_loss=0.06428, over 4739.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.251, pruned_loss=0.05638, over 954287.10 frames. ], batch size: 23, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:17:37,193 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=83900.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:17:44,322 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9388, 3.9180, 2.7267, 4.5481, 3.9075, 3.9945, 1.7571, 3.9302], + device='cuda:0'), covar=tensor([0.1658, 0.1122, 0.3505, 0.1382, 0.3129, 0.1644, 0.5531, 0.2292], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0215, 0.0251, 0.0303, 0.0299, 0.0248, 0.0270, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 07:18:03,516 INFO [finetune.py:976] (0/7) Epoch 15, batch 3750, loss[loss=0.1795, simple_loss=0.2636, pruned_loss=0.04774, over 4759.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2528, pruned_loss=0.05679, over 955720.32 frames. 
], batch size: 28, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:18:04,867 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=83940.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:18:09,569 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.106e+02 1.640e+02 2.069e+02 2.492e+02 6.161e+02, threshold=4.139e+02, percent-clipped=2.0 +2023-04-27 07:18:10,278 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83949.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:18:22,323 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7456, 1.8493, 0.9524, 1.5032, 2.2414, 1.5843, 1.5483, 1.6629], + device='cuda:0'), covar=tensor([0.0488, 0.0375, 0.0310, 0.0550, 0.0246, 0.0520, 0.0504, 0.0554], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0023, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0050, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 07:18:34,100 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=83985.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:18:35,288 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2829, 1.5921, 1.4947, 1.7711, 1.6998, 1.9514, 1.4533, 3.5037], + device='cuda:0'), covar=tensor([0.0596, 0.0759, 0.0795, 0.1145, 0.0649, 0.0551, 0.0741, 0.0170], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0038, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 07:18:36,294 INFO [finetune.py:976] (0/7) Epoch 15, batch 3800, loss[loss=0.1799, simple_loss=0.2518, pruned_loss=0.05394, over 4915.00 frames. ], tot_loss[loss=0.1843, simple_loss=0.254, pruned_loss=0.05732, over 955958.35 frames. 
], batch size: 42, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:18:36,352 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=83988.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:18:44,183 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-84000.pt +2023-04-27 07:18:51,952 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84010.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 07:18:52,537 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2211, 2.0373, 1.8107, 1.6849, 2.1947, 1.8252, 2.6469, 1.5912], + device='cuda:0'), covar=tensor([0.3500, 0.1881, 0.4090, 0.2951, 0.1726, 0.2245, 0.1379, 0.4157], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0346, 0.0425, 0.0355, 0.0383, 0.0380, 0.0372, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 07:19:07,395 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5634, 1.2975, 4.3268, 4.0701, 3.7186, 4.0344, 3.8370, 3.8220], + device='cuda:0'), covar=tensor([0.6774, 0.5808, 0.1012, 0.1460, 0.1172, 0.1598, 0.1900, 0.1370], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0305, 0.0400, 0.0404, 0.0348, 0.0403, 0.0312, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 07:19:10,170 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4491, 3.0364, 1.1820, 1.6366, 2.5817, 1.4946, 4.2248, 2.1796], + device='cuda:0'), covar=tensor([0.0609, 0.0761, 0.0785, 0.1201, 0.0431, 0.0957, 0.0220, 0.0554], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0051, 0.0075, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 07:19:10,672 INFO [finetune.py:976] (0/7) Epoch 15, batch 3850, loss[loss=0.1722, simple_loss=0.234, pruned_loss=0.05518, over 4865.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2508, pruned_loss=0.05536, over 957180.38 frames. ], batch size: 34, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:19:21,648 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84046.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:19:22,756 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.590e+02 1.914e+02 2.294e+02 5.295e+02, threshold=3.828e+02, percent-clipped=1.0 +2023-04-27 07:20:15,362 INFO [finetune.py:976] (0/7) Epoch 15, batch 3900, loss[loss=0.207, simple_loss=0.2741, pruned_loss=0.06992, over 4918.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2486, pruned_loss=0.05505, over 957892.04 frames. ], batch size: 36, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:20:27,911 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84097.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:20:50,576 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84116.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:21:20,858 INFO [finetune.py:976] (0/7) Epoch 15, batch 3950, loss[loss=0.1426, simple_loss=0.214, pruned_loss=0.03559, over 4779.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2452, pruned_loss=0.05388, over 958202.68 frames. 
], batch size: 26, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:21:34,730 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.122e+01 1.668e+02 1.985e+02 2.633e+02 4.581e+02, threshold=3.971e+02, percent-clipped=4.0 +2023-04-27 07:21:41,492 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84158.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:21:47,525 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8617, 2.3589, 2.0285, 2.1961, 1.6415, 1.9203, 1.9192, 1.4668], + device='cuda:0'), covar=tensor([0.2075, 0.1156, 0.0726, 0.1205, 0.3361, 0.1236, 0.2090, 0.2818], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0310, 0.0221, 0.0283, 0.0313, 0.0266, 0.0254, 0.0268], + device='cuda:0'), out_proj_covar=tensor([1.1656e-04, 1.2349e-04, 8.7920e-05, 1.1266e-04, 1.2750e-04, 1.0630e-04, + 1.0266e-04, 1.0681e-04], device='cuda:0') +2023-04-27 07:22:12,110 INFO [finetune.py:976] (0/7) Epoch 15, batch 4000, loss[loss=0.1983, simple_loss=0.2671, pruned_loss=0.06477, over 4911.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2443, pruned_loss=0.05334, over 958621.76 frames. ], batch size: 43, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:22:31,728 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84199.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:22:34,211 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-27 07:23:00,614 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84236.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:23:02,206 INFO [finetune.py:976] (0/7) Epoch 15, batch 4050, loss[loss=0.1558, simple_loss=0.2241, pruned_loss=0.04379, over 4826.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.2499, pruned_loss=0.05557, over 956316.04 frames. ], batch size: 30, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:23:09,758 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.868e+01 1.686e+02 2.049e+02 2.449e+02 3.463e+02, threshold=4.099e+02, percent-clipped=0.0 +2023-04-27 07:23:18,188 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84260.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:23:35,684 INFO [finetune.py:976] (0/7) Epoch 15, batch 4100, loss[loss=0.2402, simple_loss=0.2989, pruned_loss=0.09073, over 4733.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2519, pruned_loss=0.0555, over 956554.82 frames. 
], batch size: 59, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:23:42,201 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8450, 1.4516, 1.3736, 1.5883, 2.0471, 1.6667, 1.4028, 1.3326], + device='cuda:0'), covar=tensor([0.1616, 0.1464, 0.1813, 0.1303, 0.0866, 0.1522, 0.2120, 0.2254], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0318, 0.0355, 0.0294, 0.0333, 0.0315, 0.0306, 0.0366], + device='cuda:0'), out_proj_covar=tensor([6.4396e-05, 6.6511e-05, 7.5877e-05, 6.0112e-05, 6.9313e-05, 6.6679e-05, + 6.4722e-05, 7.8106e-05], device='cuda:0') +2023-04-27 07:23:42,203 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84297.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:23:48,045 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84305.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 07:24:08,400 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84336.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:24:09,502 INFO [finetune.py:976] (0/7) Epoch 15, batch 4150, loss[loss=0.2156, simple_loss=0.2835, pruned_loss=0.07391, over 4812.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2535, pruned_loss=0.05642, over 955183.45 frames. ], batch size: 47, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:24:11,369 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84341.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:24:16,002 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.201e+02 1.658e+02 1.986e+02 2.406e+02 7.031e+02, threshold=3.971e+02, percent-clipped=3.0 +2023-04-27 07:24:17,825 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3686, 1.6909, 1.5827, 1.8951, 1.8186, 2.0936, 1.5001, 3.7997], + device='cuda:0'), covar=tensor([0.0604, 0.0785, 0.0801, 0.1213, 0.0631, 0.0470, 0.0732, 0.0130], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0057], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 07:24:21,864 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0323, 1.8331, 1.6623, 1.4139, 1.8960, 1.6704, 2.1638, 1.4567], + device='cuda:0'), covar=tensor([0.2747, 0.1386, 0.3542, 0.2101, 0.1224, 0.1644, 0.1376, 0.3746], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0345, 0.0424, 0.0353, 0.0381, 0.0379, 0.0371, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 07:24:43,282 INFO [finetune.py:976] (0/7) Epoch 15, batch 4200, loss[loss=0.1635, simple_loss=0.2361, pruned_loss=0.04542, over 4889.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2528, pruned_loss=0.0564, over 950814.59 frames. 
], batch size: 32, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:24:49,257 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84397.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:24:52,485 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0931, 1.5714, 1.8981, 2.2044, 1.8858, 1.5450, 1.0505, 1.5677], + device='cuda:0'), covar=tensor([0.3564, 0.3649, 0.1884, 0.2325, 0.2593, 0.2769, 0.4684, 0.2308], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0246, 0.0224, 0.0316, 0.0215, 0.0228, 0.0229, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 07:25:03,267 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84416.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:25:05,737 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7530, 1.4063, 1.8583, 2.2349, 1.8919, 1.7608, 1.8389, 1.7986], + device='cuda:0'), covar=tensor([0.4608, 0.6826, 0.6649, 0.5873, 0.5737, 0.7695, 0.8056, 0.8130], + device='cuda:0'), in_proj_covar=tensor([0.0414, 0.0404, 0.0490, 0.0503, 0.0443, 0.0464, 0.0473, 0.0475], + device='cuda:0'), out_proj_covar=tensor([9.9795e-05, 9.9770e-05, 1.1034e-04, 1.1993e-04, 1.0657e-04, 1.1174e-04, + 1.1258e-04, 1.1278e-04], device='cuda:0') +2023-04-27 07:25:22,956 INFO [finetune.py:976] (0/7) Epoch 15, batch 4250, loss[loss=0.1954, simple_loss=0.2635, pruned_loss=0.06367, over 4890.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2506, pruned_loss=0.05591, over 952675.96 frames. ], batch size: 35, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:25:33,108 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.587e+02 1.989e+02 2.520e+02 5.183e+02, threshold=3.979e+02, percent-clipped=3.0 +2023-04-27 07:25:41,993 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84453.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:25:56,092 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=84464.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:26:28,013 INFO [finetune.py:976] (0/7) Epoch 15, batch 4300, loss[loss=0.164, simple_loss=0.2195, pruned_loss=0.05428, over 4827.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2483, pruned_loss=0.05566, over 952240.71 frames. ], batch size: 38, lr: 3.48e-03, grad_scale: 32.0 +2023-04-27 07:26:29,371 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6557, 2.0828, 2.4930, 3.1762, 2.4412, 1.9576, 1.8035, 2.3386], + device='cuda:0'), covar=tensor([0.3327, 0.3317, 0.1588, 0.2361, 0.2822, 0.2768, 0.4136, 0.2210], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0248, 0.0225, 0.0318, 0.0217, 0.0230, 0.0231, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 07:26:39,978 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-27 07:27:19,026 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84525.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 07:27:32,536 INFO [finetune.py:976] (0/7) Epoch 15, batch 4350, loss[loss=0.164, simple_loss=0.2333, pruned_loss=0.04731, over 4848.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2455, pruned_loss=0.05477, over 953257.75 frames. 
], batch size: 49, lr: 3.48e-03, grad_scale: 32.0
+2023-04-27 07:27:44,743 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.607e+02 1.900e+02 2.324e+02 5.232e+02, threshold=3.801e+02, percent-clipped=2.0
+2023-04-27 07:27:54,629 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84555.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:28:02,904 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0
+2023-04-27 07:28:14,869 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2922, 1.6265, 1.5325, 2.0409, 1.8811, 2.0717, 1.5824, 4.2997],
+ device='cuda:0'), covar=tensor([0.0585, 0.0824, 0.0815, 0.1234, 0.0667, 0.0548, 0.0770, 0.0111],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 07:28:26,808 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84586.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:28:27,953 INFO [finetune.py:976] (0/7) Epoch 15, batch 4400, loss[loss=0.1772, simple_loss=0.257, pruned_loss=0.04876, over 4821.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2444, pruned_loss=0.0541, over 951728.03 frames. ], batch size: 38, lr: 3.48e-03, grad_scale: 32.0
+2023-04-27 07:28:30,481 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84592.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:28:39,044 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84605.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:28:59,195 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84633.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:29:02,146 INFO [finetune.py:976] (0/7) Epoch 15, batch 4450, loss[loss=0.1769, simple_loss=0.2518, pruned_loss=0.05099, over 4821.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2477, pruned_loss=0.05488, over 948724.00 frames. ], batch size: 40, lr: 3.48e-03, grad_scale: 32.0
+2023-04-27 07:29:04,119 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84641.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:29:07,779 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8416, 4.2963, 0.7505, 2.1495, 2.5829, 2.7385, 2.3700, 0.8762],
+ device='cuda:0'), covar=tensor([0.1311, 0.0946, 0.2218, 0.1244, 0.0899, 0.1075, 0.1516, 0.2177],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0243, 0.0137, 0.0120, 0.0130, 0.0151, 0.0117, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 07:29:08,290 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.631e+02 1.866e+02 2.330e+02 4.316e+02, threshold=3.732e+02, percent-clipped=2.0
+2023-04-27 07:29:11,987 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=84653.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:29:13,923 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1717, 1.3881, 1.6855, 1.8312, 1.7165, 1.8173, 1.7118, 1.7252],
+ device='cuda:0'), covar=tensor([0.4526, 0.5401, 0.4736, 0.4664, 0.5775, 0.7872, 0.5235, 0.4886],
+ device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0376, 0.0320, 0.0336, 0.0346, 0.0401, 0.0357, 0.0327],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 07:29:36,300 INFO [finetune.py:976] (0/7) Epoch 15, batch 4500, loss[loss=0.1873, simple_loss=0.2665, pruned_loss=0.05408, over 4791.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2499, pruned_loss=0.05486, over 949083.33 frames. ], batch size: 29, lr: 3.48e-03, grad_scale: 32.0
+2023-04-27 07:29:36,988 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=84689.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:29:38,816 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84692.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:29:40,096 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84694.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:30:09,977 INFO [finetune.py:976] (0/7) Epoch 15, batch 4550, loss[loss=0.1916, simple_loss=0.2611, pruned_loss=0.06104, over 4913.00 frames. ], tot_loss[loss=0.1821, simple_loss=0.2523, pruned_loss=0.05592, over 949509.43 frames. ], batch size: 38, lr: 3.48e-03, grad_scale: 32.0
+2023-04-27 07:30:16,091 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.003e+02 1.700e+02 1.997e+02 2.528e+02 3.461e+02, threshold=3.994e+02, percent-clipped=0.0
+2023-04-27 07:30:19,281 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84753.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:30:43,750 INFO [finetune.py:976] (0/7) Epoch 15, batch 4600, loss[loss=0.1595, simple_loss=0.2217, pruned_loss=0.04865, over 4724.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2523, pruned_loss=0.05576, over 951156.94 frames. ], batch size: 23, lr: 3.48e-03, grad_scale: 32.0
+2023-04-27 07:30:51,278 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84800.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:30:51,837 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=84801.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:31:33,985 INFO [finetune.py:976] (0/7) Epoch 15, batch 4650, loss[loss=0.183, simple_loss=0.2456, pruned_loss=0.06021, over 4900.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2501, pruned_loss=0.05558, over 952802.53 frames. ], batch size: 43, lr: 3.48e-03, grad_scale: 32.0
+2023-04-27 07:31:45,956 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.111e+02 1.628e+02 1.889e+02 2.261e+02 4.327e+02, threshold=3.778e+02, percent-clipped=1.0
+2023-04-27 07:31:55,730 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84855.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:32:05,681 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8340, 2.0807, 1.1449, 1.5326, 2.3043, 1.7139, 1.6128, 1.7633],
+ device='cuda:0'), covar=tensor([0.0497, 0.0340, 0.0297, 0.0521, 0.0218, 0.0505, 0.0489, 0.0551],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0022, 0.0029, 0.0020, 0.0029, 0.0028, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 07:32:05,705 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=84861.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:32:30,054 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84881.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:32:39,704 INFO [finetune.py:976] (0/7) Epoch 15, batch 4700, loss[loss=0.1575, simple_loss=0.2225, pruned_loss=0.04628, over 4923.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2472, pruned_loss=0.05461, over 953967.14 frames. ], batch size: 37, lr: 3.48e-03, grad_scale: 32.0
+2023-04-27 07:32:42,716 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84892.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:33:00,640 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=84903.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:33:11,817 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0586, 2.7337, 2.2376, 2.4692, 1.8805, 2.1278, 2.2019, 1.7148],
+ device='cuda:0'), covar=tensor([0.2093, 0.1129, 0.0821, 0.1229, 0.2923, 0.1220, 0.1852, 0.2607],
+ device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0309, 0.0220, 0.0282, 0.0313, 0.0264, 0.0251, 0.0267],
+ device='cuda:0'), out_proj_covar=tensor([1.1588e-04, 1.2302e-04, 8.7893e-05, 1.1208e-04, 1.2741e-04, 1.0531e-04,
+ 1.0158e-04, 1.0627e-04], device='cuda:0')
+2023-04-27 07:33:46,042 INFO [finetune.py:976] (0/7) Epoch 15, batch 4750, loss[loss=0.1822, simple_loss=0.2592, pruned_loss=0.05262, over 4907.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.246, pruned_loss=0.05437, over 955294.24 frames. ], batch size: 43, lr: 3.48e-03, grad_scale: 32.0
+2023-04-27 07:33:47,820 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=84940.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:33:53,645 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.954e+01 1.625e+02 2.007e+02 2.365e+02 3.914e+02, threshold=4.014e+02, percent-clipped=2.0
+2023-04-27 07:33:55,669 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=84951.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:33:58,634 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8707, 2.4836, 0.9516, 1.2523, 1.8182, 1.1281, 3.2651, 1.4629],
+ device='cuda:0'), covar=tensor([0.0885, 0.0838, 0.0970, 0.1671, 0.0705, 0.1355, 0.0329, 0.0968],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0046, 0.0050, 0.0052, 0.0075, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 07:34:09,553 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8403, 2.4960, 2.0570, 2.3096, 1.6633, 1.8782, 2.0420, 1.5842],
+ device='cuda:0'), covar=tensor([0.2071, 0.1115, 0.0878, 0.1205, 0.3136, 0.1369, 0.1999, 0.2608],
+ device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0308, 0.0220, 0.0281, 0.0312, 0.0263, 0.0250, 0.0265],
+ device='cuda:0'), out_proj_covar=tensor([1.1525e-04, 1.2266e-04, 8.7656e-05, 1.1161e-04, 1.2698e-04, 1.0504e-04,
+ 1.0125e-04, 1.0569e-04], device='cuda:0')
+2023-04-27 07:34:19,544 INFO [finetune.py:976] (0/7) Epoch 15, batch 4800, loss[loss=0.1744, simple_loss=0.2545, pruned_loss=0.0472, over 4836.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2478, pruned_loss=0.05466, over 955052.56 frames. ], batch size: 47, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:34:20,211 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=84989.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:34:23,047 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=84992.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:34:36,372 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85012.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:34:39,457 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4430, 2.2186, 2.6084, 2.9052, 2.4159, 2.3484, 2.5329, 2.4634],
+ device='cuda:0'), covar=tensor([0.4436, 0.6202, 0.6416, 0.5464, 0.6047, 0.7895, 0.7920, 0.7170],
+ device='cuda:0'), in_proj_covar=tensor([0.0415, 0.0405, 0.0492, 0.0505, 0.0444, 0.0467, 0.0474, 0.0477],
+ device='cuda:0'), out_proj_covar=tensor([1.0017e-04, 9.9959e-05, 1.1073e-04, 1.2032e-04, 1.0666e-04, 1.1239e-04,
+ 1.1278e-04, 1.1335e-04], device='cuda:0')
+2023-04-27 07:34:53,702 INFO [finetune.py:976] (0/7) Epoch 15, batch 4850, loss[loss=0.1933, simple_loss=0.268, pruned_loss=0.0593, over 4843.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2506, pruned_loss=0.05549, over 952133.93 frames. ], batch size: 47, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:34:53,790 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4330, 0.9620, 0.4212, 1.1043, 1.0449, 1.3339, 1.1952, 1.1498],
+ device='cuda:0'), covar=tensor([0.0485, 0.0401, 0.0407, 0.0555, 0.0300, 0.0498, 0.0480, 0.0557],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0038, 0.0050, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 07:34:54,925 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=85040.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:34:59,166 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-04-27 07:35:01,196 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.698e+02 1.957e+02 2.327e+02 4.552e+02, threshold=3.914e+02, percent-clipped=3.0
+2023-04-27 07:35:27,222 INFO [finetune.py:976] (0/7) Epoch 15, batch 4900, loss[loss=0.1768, simple_loss=0.2432, pruned_loss=0.05515, over 4910.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2525, pruned_loss=0.05643, over 952694.10 frames. ], batch size: 37, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:35:29,160 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5227, 2.0210, 1.7623, 1.9107, 1.5080, 1.5804, 1.5989, 1.3862],
+ device='cuda:0'), covar=tensor([0.1837, 0.0991, 0.0838, 0.1016, 0.3022, 0.1108, 0.1657, 0.2170],
+ device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0310, 0.0221, 0.0282, 0.0314, 0.0265, 0.0252, 0.0268],
+ device='cuda:0'), out_proj_covar=tensor([1.1609e-04, 1.2316e-04, 8.8181e-05, 1.1234e-04, 1.2796e-04, 1.0575e-04,
+ 1.0208e-04, 1.0658e-04], device='cuda:0')
+2023-04-27 07:35:29,736 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0523, 1.9516, 2.1516, 2.4059, 2.3644, 2.0628, 1.6812, 2.2827],
+ device='cuda:0'), covar=tensor([0.0877, 0.1110, 0.0706, 0.0636, 0.0665, 0.0888, 0.0940, 0.0550],
+ device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0205, 0.0184, 0.0175, 0.0180, 0.0185, 0.0156, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 07:35:40,315 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85107.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:35:45,208 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85115.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:36:00,015 INFO [finetune.py:976] (0/7) Epoch 15, batch 4950, loss[loss=0.1656, simple_loss=0.2445, pruned_loss=0.04337, over 4227.00 frames. ], tot_loss[loss=0.1833, simple_loss=0.2533, pruned_loss=0.05662, over 952675.07 frames. ], batch size: 65, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:36:07,085 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.205e+02 1.668e+02 1.905e+02 2.304e+02 4.878e+02, threshold=3.810e+02, percent-clipped=1.0
+2023-04-27 07:36:12,984 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85156.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:36:20,313 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85168.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 07:36:25,199 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85176.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:36:28,193 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85181.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:36:31,168 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1655, 1.0012, 1.0882, 1.3452, 1.4115, 1.1795, 0.8168, 1.3069],
+ device='cuda:0'), covar=tensor([0.0834, 0.1692, 0.1134, 0.0651, 0.0704, 0.0850, 0.1027, 0.0662],
+ device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0205, 0.0184, 0.0175, 0.0180, 0.0185, 0.0156, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 07:36:33,339 INFO [finetune.py:976] (0/7) Epoch 15, batch 5000, loss[loss=0.1806, simple_loss=0.2448, pruned_loss=0.05822, over 4895.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2503, pruned_loss=0.05489, over 951666.64 frames. ], batch size: 35, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:36:41,153 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0
+2023-04-27 07:37:00,626 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=85229.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:37:12,539 INFO [finetune.py:976] (0/7) Epoch 15, batch 5050, loss[loss=0.166, simple_loss=0.2332, pruned_loss=0.04938, over 4865.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2482, pruned_loss=0.05492, over 952178.17 frames. ], batch size: 34, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:37:25,141 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.655e+02 2.048e+02 2.440e+02 4.868e+02, threshold=4.096e+02, percent-clipped=4.0
+2023-04-27 07:37:47,106 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5413, 3.2190, 0.9517, 1.8785, 1.9566, 2.3335, 1.8959, 0.9773],
+ device='cuda:0'), covar=tensor([0.1202, 0.0867, 0.1764, 0.1109, 0.0947, 0.0973, 0.1332, 0.1889],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0245, 0.0137, 0.0121, 0.0132, 0.0152, 0.0119, 0.0120],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 07:38:17,996 INFO [finetune.py:976] (0/7) Epoch 15, batch 5100, loss[loss=0.1628, simple_loss=0.2315, pruned_loss=0.04704, over 4787.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.245, pruned_loss=0.05395, over 954113.00 frames. ], batch size: 29, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:38:18,662 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85289.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:38:41,083 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85307.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:38:43,995 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1081, 1.3075, 1.5564, 1.7458, 2.0411, 1.7451, 1.4548, 1.4322],
+ device='cuda:0'), covar=tensor([0.1825, 0.2189, 0.2309, 0.1631, 0.1257, 0.1993, 0.2973, 0.2638],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0319, 0.0355, 0.0293, 0.0332, 0.0314, 0.0303, 0.0367],
+ device='cuda:0'), out_proj_covar=tensor([6.4132e-05, 6.6661e-05, 7.6010e-05, 5.9787e-05, 6.9214e-05, 6.6325e-05,
+ 6.4112e-05, 7.8271e-05], device='cuda:0')
+2023-04-27 07:38:47,022 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0
+2023-04-27 07:39:06,775 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=85337.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:39:07,341 INFO [finetune.py:976] (0/7) Epoch 15, batch 5150, loss[loss=0.1841, simple_loss=0.2567, pruned_loss=0.05573, over 4827.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2455, pruned_loss=0.05484, over 955368.51 frames. ], batch size: 30, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:39:14,976 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5334, 1.4866, 0.6604, 1.2380, 1.6813, 1.4065, 1.3289, 1.3641],
+ device='cuda:0'), covar=tensor([0.0509, 0.0388, 0.0380, 0.0577, 0.0280, 0.0503, 0.0505, 0.0558],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0038, 0.0051, 0.0037, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 07:39:20,282 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.606e+02 1.874e+02 2.297e+02 4.541e+02, threshold=3.747e+02, percent-clipped=1.0
+2023-04-27 07:39:47,578 INFO [finetune.py:976] (0/7) Epoch 15, batch 5200, loss[loss=0.2196, simple_loss=0.2889, pruned_loss=0.07512, over 4886.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2503, pruned_loss=0.05641, over 954269.93 frames. ], batch size: 32, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:40:15,423 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85428.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:40:21,369 INFO [finetune.py:976] (0/7) Epoch 15, batch 5250, loss[loss=0.144, simple_loss=0.226, pruned_loss=0.03102, over 4927.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2512, pruned_loss=0.05612, over 953436.00 frames. ], batch size: 33, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:40:27,422 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.183e+02 1.584e+02 2.032e+02 2.482e+02 6.006e+02, threshold=4.063e+02, percent-clipped=4.0
+2023-04-27 07:40:33,308 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85456.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:40:38,397 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85463.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 07:40:44,661 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85471.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:40:51,985 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85483.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:40:54,957 INFO [finetune.py:976] (0/7) Epoch 15, batch 5300, loss[loss=0.1695, simple_loss=0.2465, pruned_loss=0.04621, over 4842.00 frames. ], tot_loss[loss=0.1834, simple_loss=0.2532, pruned_loss=0.05686, over 952076.06 frames. ], batch size: 47, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:40:55,686 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85489.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:40:57,527 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7811, 2.4336, 1.8125, 1.8895, 1.2223, 1.2821, 1.9631, 1.2931],
+ device='cuda:0'), covar=tensor([0.1686, 0.1429, 0.1450, 0.1730, 0.2424, 0.2002, 0.1002, 0.2018],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0212, 0.0168, 0.0203, 0.0200, 0.0183, 0.0155, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 07:41:04,887 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=85504.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:41:10,125 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8266, 2.3737, 1.9330, 2.2476, 1.6965, 1.9016, 1.8824, 1.4940],
+ device='cuda:0'), covar=tensor([0.1906, 0.1110, 0.0915, 0.1149, 0.3164, 0.1282, 0.2114, 0.2529],
+ device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0306, 0.0219, 0.0278, 0.0310, 0.0262, 0.0249, 0.0266],
+ device='cuda:0'), out_proj_covar=tensor([1.1427e-04, 1.2182e-04, 8.7101e-05, 1.1059e-04, 1.2604e-04, 1.0436e-04,
+ 1.0078e-04, 1.0594e-04], device='cuda:0')
+2023-04-27 07:41:23,612 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85530.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:41:28,365 INFO [finetune.py:976] (0/7) Epoch 15, batch 5350, loss[loss=0.1889, simple_loss=0.2535, pruned_loss=0.06212, over 4845.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2528, pruned_loss=0.05607, over 953435.30 frames. ], batch size: 47, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:41:32,143 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85544.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:41:34,434 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.243e+02 1.620e+02 1.904e+02 2.320e+02 4.507e+02, threshold=3.809e+02, percent-clipped=1.0
+2023-04-27 07:41:59,805 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6583, 3.3952, 0.9034, 1.8915, 1.9665, 2.3402, 2.0153, 1.0023],
+ device='cuda:0'), covar=tensor([0.1233, 0.0931, 0.1842, 0.1211, 0.0926, 0.0993, 0.1420, 0.1852],
+ device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0243, 0.0136, 0.0120, 0.0130, 0.0151, 0.0118, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 07:42:02,177 INFO [finetune.py:976] (0/7) Epoch 15, batch 5400, loss[loss=0.2092, simple_loss=0.2714, pruned_loss=0.07351, over 4930.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2508, pruned_loss=0.05566, over 954037.60 frames. ], batch size: 33, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:42:04,178 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85591.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:42:09,037 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0726, 2.4950, 0.8137, 1.4133, 1.4866, 1.8759, 1.5907, 0.8302],
+ device='cuda:0'), covar=tensor([0.1483, 0.1062, 0.1707, 0.1268, 0.1035, 0.0910, 0.1501, 0.1679],
+ device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0242, 0.0135, 0.0120, 0.0130, 0.0151, 0.0118, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 07:42:13,940 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85607.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:42:27,854 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85625.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:42:41,671 INFO [finetune.py:976] (0/7) Epoch 15, batch 5450, loss[loss=0.1302, simple_loss=0.2056, pruned_loss=0.02742, over 4744.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.2487, pruned_loss=0.05476, over 955241.53 frames. ], batch size: 59, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:42:53,052 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.619e+02 1.980e+02 2.331e+02 6.511e+02, threshold=3.961e+02, percent-clipped=1.0
+2023-04-27 07:42:57,389 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=85655.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:43:40,410 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85686.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 07:43:46,487 INFO [finetune.py:976] (0/7) Epoch 15, batch 5500, loss[loss=0.1298, simple_loss=0.2079, pruned_loss=0.02582, over 4847.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2452, pruned_loss=0.05374, over 956657.40 frames. ], batch size: 44, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:44:51,824 INFO [finetune.py:976] (0/7) Epoch 15, batch 5550, loss[loss=0.2026, simple_loss=0.2863, pruned_loss=0.05944, over 4860.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.248, pruned_loss=0.05532, over 955871.31 frames. ], batch size: 49, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:45:01,613 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0680, 1.9722, 4.6239, 4.4023, 4.1307, 4.3142, 4.2023, 4.0982],
+ device='cuda:0'), covar=tensor([0.5932, 0.4686, 0.0890, 0.1448, 0.0952, 0.2234, 0.1101, 0.1388],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0303, 0.0398, 0.0404, 0.0347, 0.0403, 0.0310, 0.0363],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 07:45:02,123 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.106e+02 1.632e+02 1.848e+02 2.307e+02 4.705e+02, threshold=3.696e+02, percent-clipped=4.0
+2023-04-27 07:45:11,418 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85763.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:45:16,808 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=85771.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:45:24,798 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85784.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:45:27,127 INFO [finetune.py:976] (0/7) Epoch 15, batch 5600, loss[loss=0.1876, simple_loss=0.2639, pruned_loss=0.05562, over 4907.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2502, pruned_loss=0.05586, over 954266.60 frames. ], batch size: 43, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:45:32,486 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-27 07:45:40,675 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=85811.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:45:45,399 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=85819.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:45:57,425 INFO [finetune.py:976] (0/7) Epoch 15, batch 5650, loss[loss=0.1661, simple_loss=0.2305, pruned_loss=0.05085, over 4768.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2527, pruned_loss=0.05641, over 954310.54 frames. ], batch size: 28, lr: 3.47e-03, grad_scale: 32.0
+2023-04-27 07:45:58,079 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85839.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:46:03,465 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.549e+02 1.787e+02 2.141e+02 3.216e+02, threshold=3.573e+02, percent-clipped=0.0
+2023-04-27 07:46:11,593 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.96 vs. limit=5.0
+2023-04-27 07:46:19,893 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85875.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:46:26,469 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85886.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:46:27,654 INFO [finetune.py:976] (0/7) Epoch 15, batch 5700, loss[loss=0.1374, simple_loss=0.2, pruned_loss=0.03739, over 3898.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2489, pruned_loss=0.05579, over 937798.74 frames. ], batch size: 17, lr: 3.47e-03, grad_scale: 64.0
+2023-04-27 07:46:30,805 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7581, 1.9895, 2.1884, 2.2709, 2.0913, 2.3306, 2.3346, 2.2460],
+ device='cuda:0'), covar=tensor([0.3161, 0.4794, 0.4361, 0.3915, 0.5144, 0.5953, 0.4173, 0.4093],
+ device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0375, 0.0320, 0.0334, 0.0345, 0.0400, 0.0355, 0.0327],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 07:46:44,641 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-15.pt
+2023-04-27 07:46:59,231 INFO [finetune.py:976] (0/7) Epoch 16, batch 0, loss[loss=0.1987, simple_loss=0.2702, pruned_loss=0.06357, over 4911.00 frames. ], tot_loss[loss=0.1987, simple_loss=0.2702, pruned_loss=0.06357, over 4911.00 frames. ], batch size: 46, lr: 3.46e-03, grad_scale: 64.0
+2023-04-27 07:46:59,232 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 07:47:06,750 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1749, 2.5133, 0.9471, 1.4460, 1.9299, 1.3322, 3.0312, 1.6936],
+ device='cuda:0'), covar=tensor([0.0574, 0.0547, 0.0727, 0.1159, 0.0420, 0.0859, 0.0251, 0.0560],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0046, 0.0050, 0.0052, 0.0075, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 07:47:15,738 INFO [finetune.py:1010] (0/7) Epoch 16, validation: loss=0.1534, simple_loss=0.2252, pruned_loss=0.04076, over 2265189.00 frames.
+2023-04-27 07:47:15,738 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 07:47:30,507 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85930.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 07:47:32,448 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0
+2023-04-27 07:47:34,157 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85936.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:47:41,352 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.546e+02 1.841e+02 2.213e+02 4.481e+02, threshold=3.682e+02, percent-clipped=3.0
+2023-04-27 07:47:46,982 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=85957.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:47:53,440 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0
+2023-04-27 07:47:53,856 INFO [finetune.py:976] (0/7) Epoch 16, batch 50, loss[loss=0.1897, simple_loss=0.2575, pruned_loss=0.06095, over 4863.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2551, pruned_loss=0.05877, over 214281.49 frames. ], batch size: 34, lr: 3.46e-03, grad_scale: 64.0
+2023-04-27 07:47:54,049 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-04-27 07:48:04,120 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=85981.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 07:48:10,287 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=85991.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 07:48:20,925 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-86000.pt
+2023-04-27 07:48:23,359 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86002.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:48:44,017 INFO [finetune.py:976] (0/7) Epoch 16, batch 100, loss[loss=0.1821, simple_loss=0.2596, pruned_loss=0.05228, over 4864.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2486, pruned_loss=0.05603, over 378501.84 frames. ], batch size: 34, lr: 3.46e-03, grad_scale: 64.0
+2023-04-27 07:48:45,865 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86018.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:48:54,443 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7557, 2.0256, 1.6405, 1.4369, 1.2826, 1.3075, 1.6703, 1.2453],
+ device='cuda:0'), covar=tensor([0.1593, 0.1320, 0.1445, 0.1727, 0.2375, 0.1915, 0.1025, 0.1994],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0213, 0.0169, 0.0204, 0.0200, 0.0184, 0.0155, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 07:49:27,106 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.526e+02 1.932e+02 2.355e+02 3.544e+02, threshold=3.863e+02, percent-clipped=0.0
+2023-04-27 07:49:34,867 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86052.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:49:47,407 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86063.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:49:49,096 INFO [finetune.py:976] (0/7) Epoch 16, batch 150, loss[loss=0.1575, simple_loss=0.221, pruned_loss=0.04706, over 4763.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2437, pruned_loss=0.05479, over 507651.53 frames. ], batch size: 23, lr: 3.46e-03, grad_scale: 64.0
+2023-04-27 07:49:52,055 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86069.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:50:02,640 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86084.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:50:37,974 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86113.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:50:45,308 INFO [finetune.py:976] (0/7) Epoch 16, batch 200, loss[loss=0.2011, simple_loss=0.279, pruned_loss=0.06163, over 4863.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2427, pruned_loss=0.0546, over 607797.34 frames. ], batch size: 34, lr: 3.46e-03, grad_scale: 64.0
+2023-04-27 07:51:06,766 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86130.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:51:07,905 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86132.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:51:16,789 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86139.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:51:22,243 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.564e+02 1.820e+02 2.219e+02 4.567e+02, threshold=3.639e+02, percent-clipped=1.0
+2023-04-27 07:51:33,767 INFO [finetune.py:976] (0/7) Epoch 16, batch 250, loss[loss=0.2036, simple_loss=0.2725, pruned_loss=0.06737, over 4892.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.245, pruned_loss=0.05462, over 684915.64 frames. ], batch size: 32, lr: 3.46e-03, grad_scale: 64.0
+2023-04-27 07:51:48,354 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86186.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:51:48,949 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86187.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:52:06,459 INFO [finetune.py:976] (0/7) Epoch 16, batch 300, loss[loss=0.1839, simple_loss=0.2585, pruned_loss=0.0546, over 4900.00 frames. ], tot_loss[loss=0.1811, simple_loss=0.2501, pruned_loss=0.05599, over 744763.21 frames. ], batch size: 43, lr: 3.46e-03, grad_scale: 64.0
+2023-04-27 07:52:10,595 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0
+2023-04-27 07:52:17,513 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86231.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:52:19,823 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86234.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:52:19,919 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7832, 1.1357, 1.6997, 2.1604, 1.8326, 1.6723, 1.6990, 1.7400],
+ device='cuda:0'), covar=tensor([0.4963, 0.6738, 0.6873, 0.6712, 0.6758, 0.8566, 0.8321, 0.7241],
+ device='cuda:0'), in_proj_covar=tensor([0.0419, 0.0409, 0.0496, 0.0508, 0.0447, 0.0470, 0.0477, 0.0481],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 07:52:27,197 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86246.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:52:28,286 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.717e+02 1.966e+02 2.307e+02 3.761e+02, threshold=3.931e+02, percent-clipped=1.0
+2023-04-27 07:52:31,457 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6747, 2.4115, 2.8251, 3.2120, 2.8828, 2.6176, 2.2670, 2.9046],
+ device='cuda:0'), covar=tensor([0.0876, 0.0975, 0.0527, 0.0541, 0.0659, 0.0886, 0.0748, 0.0478],
+ device='cuda:0'), in_proj_covar=tensor([0.0192, 0.0205, 0.0183, 0.0176, 0.0179, 0.0186, 0.0156, 0.0182],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 07:52:37,031 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.48 vs. limit=5.0
+2023-04-27 07:52:38,939 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0
+2023-04-27 07:52:39,295 INFO [finetune.py:976] (0/7) Epoch 16, batch 350, loss[loss=0.1747, simple_loss=0.2471, pruned_loss=0.05112, over 4862.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2527, pruned_loss=0.05667, over 791298.74 frames. ], batch size: 34, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:52:50,056 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86281.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:52:54,333 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86286.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 07:53:07,233 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86307.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:53:10,811 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86313.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:53:12,610 INFO [finetune.py:976] (0/7) Epoch 16, batch 400, loss[loss=0.1681, simple_loss=0.2396, pruned_loss=0.04829, over 4820.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2526, pruned_loss=0.05618, over 827788.72 frames. ], batch size: 33, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:53:21,091 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86329.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:53:35,566 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.581e+02 1.858e+02 2.136e+02 3.923e+02, threshold=3.716e+02, percent-clipped=0.0
+2023-04-27 07:53:41,145 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86358.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:53:45,905 INFO [finetune.py:976] (0/7) Epoch 16, batch 450, loss[loss=0.2201, simple_loss=0.2681, pruned_loss=0.08605, over 4875.00 frames. ], tot_loss[loss=0.1803, simple_loss=0.2505, pruned_loss=0.05509, over 857198.90 frames. ], batch size: 34, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:53:51,857 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7377, 2.1598, 1.6854, 1.5696, 1.2507, 1.3171, 1.7751, 1.2122],
+ device='cuda:0'), covar=tensor([0.1779, 0.1470, 0.1592, 0.1860, 0.2483, 0.2107, 0.1050, 0.2138],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0214, 0.0170, 0.0206, 0.0202, 0.0185, 0.0156, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 07:54:08,698 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7414, 2.4295, 1.6021, 1.8214, 1.3006, 1.3053, 1.7324, 1.2421],
+ device='cuda:0'), covar=tensor([0.1828, 0.1385, 0.1759, 0.1777, 0.2530, 0.2306, 0.1080, 0.2182],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0214, 0.0170, 0.0206, 0.0202, 0.0185, 0.0156, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 07:54:14,887 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86408.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:54:19,720 INFO [finetune.py:976] (0/7) Epoch 16, batch 500, loss[loss=0.1632, simple_loss=0.2333, pruned_loss=0.04654, over 4739.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2483, pruned_loss=0.05471, over 877724.69 frames. ], batch size: 27, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:54:25,194 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86425.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:54:34,594 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3025, 1.7907, 2.2470, 2.6982, 2.1926, 1.7245, 1.5346, 2.0682],
+ device='cuda:0'), covar=tensor([0.3457, 0.3385, 0.1609, 0.2373, 0.2693, 0.2721, 0.4327, 0.2134],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0245, 0.0223, 0.0315, 0.0216, 0.0229, 0.0228, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 07:54:42,236 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86440.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:54:45,736 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86445.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:54:52,715 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.729e+01 1.556e+02 1.970e+02 2.447e+02 5.301e+02, threshold=3.940e+02, percent-clipped=3.0
+2023-04-27 07:54:57,857 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-27 07:55:08,755 INFO [finetune.py:976] (0/7) Epoch 16, batch 550, loss[loss=0.1877, simple_loss=0.2642, pruned_loss=0.05565, over 4819.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2447, pruned_loss=0.05317, over 894618.94 frames. ], batch size: 45, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:55:27,493 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-04-27 07:55:29,216 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-04-27 07:56:00,651 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86501.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:56:09,093 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0
+2023-04-27 07:56:09,553 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86506.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 07:56:20,909 INFO [finetune.py:976] (0/7) Epoch 16, batch 600, loss[loss=0.213, simple_loss=0.265, pruned_loss=0.08045, over 4833.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2453, pruned_loss=0.05369, over 909035.25 frames. ], batch size: 30, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:56:32,811 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7531, 1.3747, 1.3831, 1.4834, 1.9658, 1.5987, 1.2577, 1.2999],
+ device='cuda:0'), covar=tensor([0.1451, 0.1364, 0.1745, 0.1328, 0.0791, 0.1392, 0.2053, 0.2023],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0312, 0.0350, 0.0289, 0.0329, 0.0311, 0.0299, 0.0360],
+ device='cuda:0'), out_proj_covar=tensor([6.3267e-05, 6.5197e-05, 7.4795e-05, 5.9126e-05, 6.8614e-05, 6.5673e-05,
+ 6.3203e-05, 7.6881e-05], device='cuda:0')
+2023-04-27 07:56:35,204 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86531.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:56:45,549 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0935, 1.4449, 1.7229, 2.3099, 2.3606, 1.9326, 1.7210, 2.0751],
+ device='cuda:0'), covar=tensor([0.0649, 0.1405, 0.0823, 0.0470, 0.0462, 0.0719, 0.0739, 0.0521],
+ device='cuda:0'), in_proj_covar=tensor([0.0190, 0.0203, 0.0182, 0.0173, 0.0177, 0.0184, 0.0154, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 07:56:54,677 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 1.714e+02 1.969e+02 2.421e+02 4.960e+02, threshold=3.938e+02, percent-clipped=3.0
+2023-04-27 07:57:05,048 INFO [finetune.py:976] (0/7) Epoch 16, batch 650, loss[loss=0.1656, simple_loss=0.2427, pruned_loss=0.04424, over 4856.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2483, pruned_loss=0.05467, over 917607.73 frames. ], batch size: 31, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:57:12,974 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86579.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:57:13,625 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86580.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:57:18,205 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86586.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 07:57:20,576 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5797, 2.0161, 1.6796, 1.8755, 1.5280, 1.7803, 1.6025, 1.2637],
+ device='cuda:0'), covar=tensor([0.1968, 0.1259, 0.0934, 0.1301, 0.3515, 0.1256, 0.2026, 0.2577],
+ device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0310, 0.0222, 0.0282, 0.0315, 0.0265, 0.0252, 0.0268],
+ device='cuda:0'), out_proj_covar=tensor([1.1635e-04, 1.2343e-04, 8.8465e-05, 1.1218e-04, 1.2824e-04, 1.0553e-04,
+ 1.0205e-04, 1.0663e-04], device='cuda:0')
+2023-04-27 07:57:29,904 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86602.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:57:36,594 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86613.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:57:38,332 INFO [finetune.py:976] (0/7) Epoch 16, batch 700, loss[loss=0.1167, simple_loss=0.1909, pruned_loss=0.02125, over 4789.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2505, pruned_loss=0.05495, over 927721.09 frames. ], batch size: 26, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:57:49,354 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86634.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 07:57:54,596 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86641.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:57:55,239 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7888, 1.0024, 1.5559, 1.6673, 1.6407, 1.7358, 1.5694, 1.5944],
+ device='cuda:0'), covar=tensor([0.3511, 0.4504, 0.3807, 0.3977, 0.4887, 0.6755, 0.4231, 0.4151],
+ device='cuda:0'), in_proj_covar=tensor([0.0330, 0.0371, 0.0318, 0.0332, 0.0344, 0.0397, 0.0352, 0.0325],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 07:58:00,292 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.513e+02 1.841e+02 2.164e+02 3.453e+02, threshold=3.681e+02, percent-clipped=0.0
+2023-04-27 07:58:06,397 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86658.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:58:08,176 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86661.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:58:11,207 INFO [finetune.py:976] (0/7) Epoch 16, batch 750, loss[loss=0.2127, simple_loss=0.2804, pruned_loss=0.07253, over 4802.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2511, pruned_loss=0.05485, over 933737.46 frames. ], batch size: 40, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:58:38,829 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86706.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:58:40,074 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86708.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:58:43,809 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. limit=2.0
+2023-04-27 07:58:44,311 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9046, 2.0294, 1.9212, 1.6094, 2.1429, 1.7159, 2.6681, 1.6596],
+ device='cuda:0'), covar=tensor([0.3601, 0.1665, 0.4375, 0.2669, 0.1521, 0.2423, 0.1339, 0.4247],
+ device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0349, 0.0428, 0.0359, 0.0385, 0.0383, 0.0373, 0.0421],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 07:58:44,816 INFO [finetune.py:976] (0/7) Epoch 16, batch 800, loss[loss=0.1372, simple_loss=0.212, pruned_loss=0.03121, over 4754.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2512, pruned_loss=0.05462, over 937929.85 frames. ], batch size: 26, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:58:50,326 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86725.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:59:01,304 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0
+2023-04-27 07:59:05,803 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.560e+02 1.911e+02 2.310e+02 3.714e+02, threshold=3.821e+02, percent-clipped=1.0
+2023-04-27 07:59:11,119 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86756.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:59:17,588 INFO [finetune.py:976] (0/7) Epoch 16, batch 850, loss[loss=0.2041, simple_loss=0.2739, pruned_loss=0.0672, over 4905.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2501, pruned_loss=0.05475, over 941015.90 frames. ], batch size: 36, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:59:21,919 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86773.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:59:36,374 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86796.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:59:39,980 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86801.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 07:59:49,823 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86814.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 07:59:50,494 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9419, 1.5059, 1.9596, 2.3170, 2.0033, 1.8578, 1.9260, 1.9217],
+ device='cuda:0'), covar=tensor([0.4941, 0.6504, 0.6600, 0.5592, 0.6560, 0.8292, 0.7813, 0.6544],
+ device='cuda:0'), in_proj_covar=tensor([0.0419, 0.0409, 0.0496, 0.0507, 0.0448, 0.0471, 0.0478, 0.0481],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 07:59:50,969 INFO [finetune.py:976] (0/7) Epoch 16, batch 900, loss[loss=0.1951, simple_loss=0.2582, pruned_loss=0.06602, over 3893.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2483, pruned_loss=0.05468, over 941812.05 frames. ], batch size: 17, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 07:59:58,451 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6135, 2.0369, 2.4373, 3.0749, 2.4267, 1.9026, 1.8293, 2.3007],
+ device='cuda:0'), covar=tensor([0.3877, 0.3561, 0.1758, 0.2729, 0.3052, 0.3000, 0.4403, 0.2395],
+ device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0249, 0.0225, 0.0318, 0.0219, 0.0231, 0.0230, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 08:00:22,184 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.518e+02 1.851e+02 2.220e+02 4.721e+02, threshold=3.701e+02, percent-clipped=1.0
+2023-04-27 08:00:52,313 INFO [finetune.py:976] (0/7) Epoch 16, batch 950, loss[loss=0.1416, simple_loss=0.2157, pruned_loss=0.03375, over 4771.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2455, pruned_loss=0.05399, over 946471.44 frames. ], batch size: 28, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 08:01:03,386 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86875.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:01:05,148 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=86878.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:01:05,183 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7627, 2.1807, 1.6892, 1.5068, 1.3008, 1.3285, 1.7389, 1.2128],
+ device='cuda:0'), covar=tensor([0.1748, 0.1342, 0.1541, 0.1797, 0.2457, 0.2101, 0.1069, 0.2180],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0212, 0.0168, 0.0203, 0.0199, 0.0183, 0.0155, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 08:01:36,760 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=86902.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:01:58,433 INFO [finetune.py:976] (0/7) Epoch 16, batch 1000, loss[loss=0.2306, simple_loss=0.2974, pruned_loss=0.08191, over 4909.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2485, pruned_loss=0.05519, over 949813.79 frames. ], batch size: 35, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 08:02:18,019 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=86936.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:02:19,922 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=86939.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:02:31,464 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.164e+02 1.765e+02 1.981e+02 2.453e+02 9.285e+02, threshold=3.962e+02, percent-clipped=3.0
+2023-04-27 08:02:37,025 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=86950.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:02:59,853 INFO [finetune.py:976] (0/7) Epoch 16, batch 1050, loss[loss=0.2171, simple_loss=0.2737, pruned_loss=0.08021, over 4835.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2504, pruned_loss=0.05547, over 949347.21 frames. ], batch size: 30, lr: 3.46e-03, grad_scale: 32.0
+2023-04-27 08:03:05,919 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6029, 2.0839, 1.8044, 1.9386, 1.6380, 1.8014, 1.7606, 1.3508],
+ device='cuda:0'), covar=tensor([0.1896, 0.1124, 0.0861, 0.1272, 0.3097, 0.1225, 0.1934, 0.2514],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0315, 0.0224, 0.0285, 0.0317, 0.0268, 0.0255, 0.0271],
+ device='cuda:0'), out_proj_covar=tensor([1.1730e-04, 1.2519e-04, 8.9292e-05, 1.1343e-04, 1.2901e-04, 1.0679e-04,
+ 1.0303e-04, 1.0780e-04], device='cuda:0')
+2023-04-27 08:03:13,846 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6535, 1.5834, 0.7307, 1.3437, 1.7447, 1.5481, 1.4337, 1.4451],
+ device='cuda:0'), covar=tensor([0.0503, 0.0373, 0.0380, 0.0560, 0.0272, 0.0522, 0.0497, 0.0588],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0023, 0.0029, 0.0020, 0.0028, 0.0028, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 08:03:28,653 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87002.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 08:03:43,717 INFO [finetune.py:976] (0/7) Epoch 16, batch 1100, loss[loss=0.1556, simple_loss=0.2326, pruned_loss=0.03933, over 4818.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.251, pruned_loss=0.05522, over 950540.54 frames. ], batch size: 38, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:04:06,134 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.660e+02 1.994e+02 2.342e+02 3.600e+02, threshold=3.988e+02, percent-clipped=0.0
+2023-04-27 08:04:15,688 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87063.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 08:04:17,376 INFO [finetune.py:976] (0/7) Epoch 16, batch 1150, loss[loss=0.1622, simple_loss=0.2432, pruned_loss=0.04056, over 4858.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2515, pruned_loss=0.05509, over 949912.98 frames. ], batch size: 34, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:04:37,616 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87096.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:04:40,683 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87101.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 08:04:50,580 INFO [finetune.py:976] (0/7) Epoch 16, batch 1200, loss[loss=0.1882, simple_loss=0.2525, pruned_loss=0.06199, over 4886.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2487, pruned_loss=0.05444, over 949200.04 frames. ], batch size: 35, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:05:10,107 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=87144.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:05:13,048 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.748e+02 2.017e+02 2.271e+02 5.228e+02, threshold=4.034e+02, percent-clipped=1.0
+2023-04-27 08:05:13,114 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=87149.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:05:24,030 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7484, 2.2094, 2.5674, 3.2618, 2.5410, 2.0325, 2.1390, 2.5281],
+ device='cuda:0'), covar=tensor([0.3513, 0.3520, 0.1702, 0.2689, 0.3155, 0.2883, 0.3891, 0.2327],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0247, 0.0224, 0.0316, 0.0217, 0.0230, 0.0228, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 08:05:24,513 INFO [finetune.py:976] (0/7) Epoch 16, batch 1250, loss[loss=0.1191, simple_loss=0.1897, pruned_loss=0.02426, over 4701.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2472, pruned_loss=0.05362, over 950980.50 frames. ], batch size: 23, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:05:27,036 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87170.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:05:28,402 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-27 08:05:36,799 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9471, 2.4695, 2.0687, 2.3352, 1.7416, 2.0701, 2.1513, 1.6072],
+ device='cuda:0'), covar=tensor([0.1965, 0.1118, 0.0835, 0.1206, 0.2913, 0.1109, 0.1835, 0.2501],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0312, 0.0223, 0.0283, 0.0315, 0.0267, 0.0254, 0.0269],
+ device='cuda:0'), out_proj_covar=tensor([1.1672e-04, 1.2420e-04, 8.8793e-05, 1.1272e-04, 1.2822e-04, 1.0634e-04,
+ 1.0261e-04, 1.0727e-04], device='cuda:0')
+2023-04-27 08:05:58,111 INFO [finetune.py:976] (0/7) Epoch 16, batch 1300, loss[loss=0.1917, simple_loss=0.2584, pruned_loss=0.06246, over 4930.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2437, pruned_loss=0.05229, over 952109.75 frames. ], batch size: 38, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:06:08,745 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87230.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:06:12,082 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87234.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:06:13,311 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87236.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:06:21,119 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.625e+02 1.883e+02 2.267e+02 3.477e+02, threshold=3.766e+02, percent-clipped=0.0
+2023-04-27 08:06:31,009 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9686, 1.9984, 1.8136, 1.7076, 2.1922, 1.8613, 2.6599, 1.5486],
+ device='cuda:0'), covar=tensor([0.3591, 0.1917, 0.4589, 0.2912, 0.1636, 0.2246, 0.1254, 0.4614],
+ device='cuda:0'), in_proj_covar=tensor([0.0347, 0.0354, 0.0433, 0.0364, 0.0391, 0.0387, 0.0378, 0.0427],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 08:06:31,975 INFO [finetune.py:976] (0/7) Epoch 16, batch 1350, loss[loss=0.1799, simple_loss=0.2596, pruned_loss=0.05013, over 4933.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2443, pruned_loss=0.05275, over 952237.81 frames. ], batch size: 33, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:06:45,594 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=87284.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:06:50,326 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87291.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:07:16,734 INFO [finetune.py:976] (0/7) Epoch 16, batch 1400, loss[loss=0.1892, simple_loss=0.2727, pruned_loss=0.05281, over 4933.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2481, pruned_loss=0.05373, over 953592.67 frames. ], batch size: 38, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:07:18,761 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0
+2023-04-27 08:07:47,251 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.31 vs. limit=5.0
+2023-04-27 08:07:58,927 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.760e+02 2.100e+02 2.306e+02 6.240e+02, threshold=4.200e+02, percent-clipped=3.0
+2023-04-27 08:08:09,825 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87358.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 08:08:17,788 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87362.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:08:20,155 INFO [finetune.py:976] (0/7) Epoch 16, batch 1450, loss[loss=0.1937, simple_loss=0.2621, pruned_loss=0.06267, over 4854.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2492, pruned_loss=0.05392, over 953498.64 frames. ], batch size: 31, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:09:14,068 INFO [finetune.py:976] (0/7) Epoch 16, batch 1500, loss[loss=0.2096, simple_loss=0.2762, pruned_loss=0.07149, over 4817.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2513, pruned_loss=0.05496, over 954850.36 frames. ], batch size: 33, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:09:18,462 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87423.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:09:27,291 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6356, 0.9818, 1.6905, 2.1095, 1.6799, 1.5923, 1.6649, 1.6595],
+ device='cuda:0'), covar=tensor([0.4547, 0.6250, 0.5707, 0.5820, 0.5767, 0.7388, 0.7243, 0.7449],
+ device='cuda:0'), in_proj_covar=tensor([0.0418, 0.0408, 0.0496, 0.0507, 0.0447, 0.0471, 0.0477, 0.0480],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 08:09:36,460 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.658e+02 1.977e+02 2.415e+02 4.579e+02, threshold=3.953e+02, percent-clipped=1.0
+2023-04-27 08:09:43,854 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87461.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:09:46,778 INFO [finetune.py:976] (0/7) Epoch 16, batch 1550, loss[loss=0.2373, simple_loss=0.3025, pruned_loss=0.08606, over 4816.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2518, pruned_loss=0.05533, over 953180.51 frames. ], batch size: 38, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:09:49,311 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87470.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:10:07,688 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4857, 1.3513, 1.6937, 1.6801, 1.3520, 1.1526, 1.4286, 1.0545],
+ device='cuda:0'), covar=tensor([0.0542, 0.0641, 0.0444, 0.0596, 0.0792, 0.1191, 0.0588, 0.0624],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0070, 0.0068, 0.0077, 0.0098, 0.0076, 0.0069],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 08:10:10,709 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0
+2023-04-27 08:10:19,978 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0
+2023-04-27 08:10:30,184 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87505.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:10:42,330 INFO [finetune.py:976] (0/7) Epoch 16, batch 1600, loss[loss=0.1472, simple_loss=0.2172, pruned_loss=0.03859, over 4799.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.25, pruned_loss=0.05515, over 953079.85 frames. ], batch size: 29, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:10:42,549 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0
+2023-04-27 08:10:43,622 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=87518.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:10:46,078 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87522.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:10:54,392 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87534.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:11:05,886 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.660e+02 1.902e+02 2.277e+02 5.320e+02, threshold=3.803e+02, percent-clipped=1.0
+2023-04-27 08:11:06,575 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2312, 1.2823, 3.8659, 3.5970, 3.5033, 3.7460, 3.7413, 3.4123],
+ device='cuda:0'), covar=tensor([0.7300, 0.5856, 0.1221, 0.1846, 0.1120, 0.1500, 0.1263, 0.1500],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0302, 0.0399, 0.0403, 0.0345, 0.0402, 0.0306, 0.0362],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 08:11:16,206 INFO [finetune.py:976] (0/7) Epoch 16, batch 1650, loss[loss=0.1757, simple_loss=0.2482, pruned_loss=0.05158, over 4827.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2481, pruned_loss=0.0543, over 954599.20 frames. ], batch size: 41, lr: 3.45e-03, grad_scale: 32.0
+2023-04-27 08:11:16,339 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87566.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 08:11:26,482 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=87582.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:11:28,237 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87584.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:11:29,408 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87586.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:11:31,280 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87589.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:11:43,130 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=87605.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:11:49,708 INFO [finetune.py:976] (0/7) Epoch 16, batch 1700, loss[loss=0.2038, simple_loss=0.272, pruned_loss=0.06781, over 4830.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2453, pruned_loss=0.05333, over 957443.02 frames.
], batch size: 33, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:11:49,862 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3418, 1.5386, 1.7309, 1.8556, 1.7366, 1.8262, 1.7898, 1.7563], + device='cuda:0'), covar=tensor([0.3995, 0.5597, 0.4549, 0.4541, 0.5343, 0.7456, 0.5218, 0.4998], + device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0374, 0.0320, 0.0333, 0.0345, 0.0397, 0.0354, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 08:11:54,673 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8912, 2.1829, 2.0958, 2.2338, 1.9769, 2.1992, 2.1379, 2.1562], + device='cuda:0'), covar=tensor([0.3975, 0.6721, 0.5448, 0.5433, 0.6467, 0.7731, 0.6872, 0.5817], + device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0374, 0.0319, 0.0333, 0.0345, 0.0397, 0.0354, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 08:12:08,467 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87645.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 08:12:12,242 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.609e+02 2.053e+02 2.465e+02 3.849e+02, threshold=4.106e+02, percent-clipped=1.0 +2023-04-27 08:12:12,969 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87650.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:12:18,830 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87658.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 08:12:23,600 INFO [finetune.py:976] (0/7) Epoch 16, batch 1750, loss[loss=0.1712, simple_loss=0.2516, pruned_loss=0.04536, over 4913.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.247, pruned_loss=0.0539, over 955915.65 frames. ], batch size: 36, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:12:23,709 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=87666.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 08:12:55,802 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=87706.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 08:13:07,614 INFO [finetune.py:976] (0/7) Epoch 16, batch 1800, loss[loss=0.2037, simple_loss=0.2631, pruned_loss=0.07215, over 4894.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2498, pruned_loss=0.05469, over 955826.50 frames. ], batch size: 35, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:13:14,221 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87718.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:13:48,597 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.59 vs. 
limit=2.0 +2023-04-27 08:13:49,464 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.657e+02 1.996e+02 2.458e+02 4.294e+02, threshold=3.992e+02, percent-clipped=1.0 +2023-04-27 08:14:08,969 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5402, 0.7069, 1.4864, 1.9493, 1.6171, 1.4529, 1.5076, 1.4922], + device='cuda:0'), covar=tensor([0.4373, 0.6141, 0.5526, 0.5860, 0.5567, 0.6856, 0.6891, 0.7420], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0406, 0.0494, 0.0505, 0.0445, 0.0470, 0.0476, 0.0479], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:14:12,969 INFO [finetune.py:976] (0/7) Epoch 16, batch 1850, loss[loss=0.1533, simple_loss=0.2275, pruned_loss=0.03958, over 4804.00 frames. ], tot_loss[loss=0.181, simple_loss=0.251, pruned_loss=0.05553, over 954952.17 frames. ], batch size: 45, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:15:07,316 INFO [finetune.py:976] (0/7) Epoch 16, batch 1900, loss[loss=0.1696, simple_loss=0.2498, pruned_loss=0.04468, over 4806.00 frames. ], tot_loss[loss=0.1817, simple_loss=0.2516, pruned_loss=0.05589, over 954109.02 frames. ], batch size: 40, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:15:08,022 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87817.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:15:28,298 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.628e+02 1.844e+02 2.203e+02 4.331e+02, threshold=3.688e+02, percent-clipped=1.0 +2023-04-27 08:15:37,122 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87861.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 08:15:40,074 INFO [finetune.py:976] (0/7) Epoch 16, batch 1950, loss[loss=0.1287, simple_loss=0.2009, pruned_loss=0.02826, over 4223.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2508, pruned_loss=0.05535, over 952791.43 frames. ], batch size: 18, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:16:03,448 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4600, 2.6126, 2.2033, 2.3415, 2.7813, 2.3438, 3.5373, 1.8910], + device='cuda:0'), covar=tensor([0.4052, 0.2192, 0.4061, 0.3360, 0.1778, 0.2616, 0.1623, 0.4554], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0350, 0.0428, 0.0358, 0.0385, 0.0383, 0.0373, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:16:10,448 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=87886.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:16:47,482 INFO [finetune.py:976] (0/7) Epoch 16, batch 2000, loss[loss=0.1637, simple_loss=0.2352, pruned_loss=0.04606, over 4817.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2477, pruned_loss=0.05428, over 952961.58 frames. 
], batch size: 41, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:16:58,994 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=87934.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:17:01,549 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7147, 1.1308, 1.7130, 2.2527, 1.8118, 1.6373, 1.7194, 1.6924], + device='cuda:0'), covar=tensor([0.4939, 0.7235, 0.7036, 0.6069, 0.6528, 0.8502, 0.8158, 0.9180], + device='cuda:0'), in_proj_covar=tensor([0.0418, 0.0407, 0.0495, 0.0506, 0.0445, 0.0471, 0.0476, 0.0480], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:17:02,677 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87940.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 08:17:05,747 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87945.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:17:08,484 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.621e+02 1.906e+02 2.380e+02 5.072e+02, threshold=3.811e+02, percent-clipped=4.0 +2023-04-27 08:17:17,311 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=87961.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 08:17:19,079 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3562, 2.8169, 2.3865, 2.6191, 2.0651, 2.4736, 2.3687, 1.8458], + device='cuda:0'), covar=tensor([0.1683, 0.1205, 0.0800, 0.1202, 0.2913, 0.1130, 0.1889, 0.2863], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0310, 0.0221, 0.0281, 0.0312, 0.0262, 0.0252, 0.0266], + device='cuda:0'), out_proj_covar=tensor([1.1521e-04, 1.2327e-04, 8.7949e-05, 1.1185e-04, 1.2728e-04, 1.0451e-04, + 1.0179e-04, 1.0579e-04], device='cuda:0') +2023-04-27 08:17:21,295 INFO [finetune.py:976] (0/7) Epoch 16, batch 2050, loss[loss=0.1664, simple_loss=0.234, pruned_loss=0.04943, over 4917.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2445, pruned_loss=0.05304, over 952423.13 frames. ], batch size: 37, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:17:42,583 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-88000.pt +2023-04-27 08:17:49,318 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-27 08:17:55,289 INFO [finetune.py:976] (0/7) Epoch 16, batch 2100, loss[loss=0.2003, simple_loss=0.2624, pruned_loss=0.06911, over 4836.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2447, pruned_loss=0.05366, over 950571.34 frames. ], batch size: 33, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:17:56,609 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88018.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:18:16,281 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.262e+02 1.786e+02 2.026e+02 2.569e+02 5.831e+02, threshold=4.052e+02, percent-clipped=4.0 +2023-04-27 08:18:28,514 INFO [finetune.py:976] (0/7) Epoch 16, batch 2150, loss[loss=0.2023, simple_loss=0.28, pruned_loss=0.06231, over 4819.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2476, pruned_loss=0.05447, over 952341.37 frames. 
], batch size: 39, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:18:28,582 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=88066.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:18:43,900 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5043, 3.7522, 0.8857, 2.0162, 2.1055, 2.5778, 2.2364, 1.0773], + device='cuda:0'), covar=tensor([0.1474, 0.0996, 0.2021, 0.1228, 0.1022, 0.1050, 0.1366, 0.2047], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0243, 0.0136, 0.0120, 0.0130, 0.0152, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 08:18:50,550 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88100.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:19:00,078 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-27 08:19:01,076 INFO [finetune.py:976] (0/7) Epoch 16, batch 2200, loss[loss=0.1589, simple_loss=0.2318, pruned_loss=0.04302, over 4814.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2498, pruned_loss=0.05502, over 951069.07 frames. ], batch size: 39, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:19:02,288 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88117.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:19:34,552 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.597e+02 1.844e+02 2.202e+02 4.029e+02, threshold=3.688e+02, percent-clipped=0.0 +2023-04-27 08:19:47,612 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88161.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 08:19:47,634 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88161.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:19:50,025 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=88165.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:19:56,115 INFO [finetune.py:976] (0/7) Epoch 16, batch 2250, loss[loss=0.1988, simple_loss=0.2793, pruned_loss=0.05912, over 4811.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2514, pruned_loss=0.05589, over 950776.84 frames. ], batch size: 38, lr: 3.45e-03, grad_scale: 32.0 +2023-04-27 08:20:30,760 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9134, 1.8754, 1.8310, 1.5051, 2.0588, 1.6700, 2.5940, 1.5994], + device='cuda:0'), covar=tensor([0.3867, 0.1890, 0.4957, 0.2932, 0.1798, 0.2477, 0.1344, 0.4775], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0352, 0.0432, 0.0361, 0.0388, 0.0387, 0.0376, 0.0426], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:20:50,811 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=88209.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:21:01,647 INFO [finetune.py:976] (0/7) Epoch 16, batch 2300, loss[loss=0.2018, simple_loss=0.2574, pruned_loss=0.07307, over 4258.00 frames. ], tot_loss[loss=0.181, simple_loss=0.251, pruned_loss=0.05556, over 950851.60 frames. 
], batch size: 18, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:21:18,883 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88240.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:21:19,516 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1463, 2.1211, 1.9587, 1.8541, 2.3358, 1.8452, 2.8519, 1.7551], + device='cuda:0'), covar=tensor([0.4111, 0.2063, 0.5010, 0.2925, 0.1762, 0.2716, 0.1295, 0.4567], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0351, 0.0432, 0.0361, 0.0388, 0.0387, 0.0377, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:21:21,926 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88245.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:21:24,255 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.059e+01 1.606e+02 1.877e+02 2.265e+02 8.419e+02, threshold=3.754e+02, percent-clipped=4.0 +2023-04-27 08:21:28,172 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-27 08:21:32,173 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88261.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 08:21:41,584 INFO [finetune.py:976] (0/7) Epoch 16, batch 2350, loss[loss=0.1347, simple_loss=0.2102, pruned_loss=0.02958, over 4764.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2483, pruned_loss=0.05463, over 953290.43 frames. ], batch size: 28, lr: 3.44e-03, grad_scale: 64.0 +2023-04-27 08:22:14,475 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=88288.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:22:16,366 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=88291.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 08:22:17,547 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=88293.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:22:19,416 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6499, 1.8312, 0.7218, 1.3757, 1.9053, 1.5190, 1.4568, 1.5044], + device='cuda:0'), covar=tensor([0.0494, 0.0346, 0.0367, 0.0553, 0.0269, 0.0519, 0.0493, 0.0566], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0037, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 08:22:38,198 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=88309.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:22:48,484 INFO [finetune.py:976] (0/7) Epoch 16, batch 2400, loss[loss=0.1757, simple_loss=0.2532, pruned_loss=0.04909, over 4762.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2466, pruned_loss=0.05433, over 954914.34 frames. 
], batch size: 26, lr: 3.44e-03, grad_scale: 64.0 +2023-04-27 08:23:00,217 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5600, 1.4346, 1.8093, 1.8504, 1.4300, 1.3326, 1.5257, 0.9562], + device='cuda:0'), covar=tensor([0.0525, 0.0619, 0.0414, 0.0498, 0.0713, 0.1068, 0.0564, 0.0686], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0069, 0.0067, 0.0075, 0.0096, 0.0075, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 08:23:04,560 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-27 08:23:16,136 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-27 08:23:19,552 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-27 08:23:23,059 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.498e+02 1.832e+02 2.156e+02 3.450e+02, threshold=3.664e+02, percent-clipped=0.0 +2023-04-27 08:23:25,032 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=88352.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 08:23:33,497 INFO [finetune.py:976] (0/7) Epoch 16, batch 2450, loss[loss=0.1703, simple_loss=0.2276, pruned_loss=0.05651, over 4767.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2434, pruned_loss=0.05258, over 955508.05 frames. ], batch size: 28, lr: 3.44e-03, grad_scale: 64.0 +2023-04-27 08:23:52,926 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9577, 2.3792, 0.9947, 1.3707, 1.7732, 1.2364, 2.9481, 1.5463], + device='cuda:0'), covar=tensor([0.0696, 0.0592, 0.0732, 0.1191, 0.0467, 0.0969, 0.0245, 0.0631], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0046, 0.0050, 0.0052, 0.0075, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 08:24:05,398 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8639, 1.4901, 1.9513, 2.3954, 1.9953, 1.8239, 1.9124, 1.8324], + device='cuda:0'), covar=tensor([0.4895, 0.7078, 0.7074, 0.6123, 0.6324, 0.8528, 0.8639, 0.9567], + device='cuda:0'), in_proj_covar=tensor([0.0418, 0.0407, 0.0494, 0.0505, 0.0446, 0.0472, 0.0477, 0.0480], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:24:30,564 INFO [finetune.py:976] (0/7) Epoch 16, batch 2500, loss[loss=0.1796, simple_loss=0.2609, pruned_loss=0.04911, over 4908.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2461, pruned_loss=0.05383, over 955227.36 frames. 
], batch size: 43, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:24:53,092 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6873, 1.2109, 1.8131, 2.1637, 1.7670, 1.6751, 1.7611, 1.7416], + device='cuda:0'), covar=tensor([0.4922, 0.6717, 0.6544, 0.6035, 0.6340, 0.8230, 0.8721, 0.8228], + device='cuda:0'), in_proj_covar=tensor([0.0419, 0.0408, 0.0495, 0.0507, 0.0447, 0.0473, 0.0478, 0.0481], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:24:54,797 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.712e+02 2.054e+02 2.521e+02 4.262e+02, threshold=4.109e+02, percent-clipped=2.0 +2023-04-27 08:24:58,634 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88456.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:25:04,746 INFO [finetune.py:976] (0/7) Epoch 16, batch 2550, loss[loss=0.1096, simple_loss=0.172, pruned_loss=0.02356, over 4353.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2495, pruned_loss=0.05483, over 954544.19 frames. ], batch size: 19, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:25:06,689 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3547, 2.0771, 2.1847, 2.6408, 2.5315, 2.3190, 1.9361, 2.5009], + device='cuda:0'), covar=tensor([0.0592, 0.0801, 0.0462, 0.0385, 0.0439, 0.0543, 0.0650, 0.0332], + device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0203, 0.0184, 0.0174, 0.0177, 0.0183, 0.0155, 0.0181], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:25:25,397 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9221, 1.6491, 4.0226, 3.7680, 3.5774, 3.7438, 3.5763, 3.6143], + device='cuda:0'), covar=tensor([0.6224, 0.5138, 0.0888, 0.1510, 0.1096, 0.1724, 0.3507, 0.1216], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0300, 0.0399, 0.0403, 0.0346, 0.0403, 0.0307, 0.0360], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:25:38,575 INFO [finetune.py:976] (0/7) Epoch 16, batch 2600, loss[loss=0.16, simple_loss=0.236, pruned_loss=0.04203, over 4033.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2502, pruned_loss=0.05446, over 954095.58 frames. ], batch size: 17, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:26:18,051 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.329e+02 1.735e+02 2.047e+02 2.467e+02 4.454e+02, threshold=4.094e+02, percent-clipped=2.0 +2023-04-27 08:26:32,314 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1254, 2.7517, 1.1113, 1.5276, 2.2985, 1.3752, 3.6810, 1.7813], + device='cuda:0'), covar=tensor([0.0674, 0.0595, 0.0808, 0.1232, 0.0446, 0.0967, 0.0303, 0.0613], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0046, 0.0050, 0.0052, 0.0075, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 08:26:39,229 INFO [finetune.py:976] (0/7) Epoch 16, batch 2650, loss[loss=0.1639, simple_loss=0.2392, pruned_loss=0.04428, over 4809.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2528, pruned_loss=0.05542, over 952764.99 frames. ], batch size: 25, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:27:20,286 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. 
limit=2.0 +2023-04-27 08:27:29,039 INFO [finetune.py:976] (0/7) Epoch 16, batch 2700, loss[loss=0.1418, simple_loss=0.2119, pruned_loss=0.03587, over 4925.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2517, pruned_loss=0.0553, over 952853.93 frames. ], batch size: 33, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:28:11,106 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=88647.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 08:28:13,351 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.464e+02 1.764e+02 2.078e+02 3.620e+02, threshold=3.528e+02, percent-clipped=0.0 +2023-04-27 08:28:36,532 INFO [finetune.py:976] (0/7) Epoch 16, batch 2750, loss[loss=0.164, simple_loss=0.2271, pruned_loss=0.05045, over 4821.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2489, pruned_loss=0.05417, over 954838.63 frames. ], batch size: 33, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:29:21,107 INFO [finetune.py:976] (0/7) Epoch 16, batch 2800, loss[loss=0.1595, simple_loss=0.2356, pruned_loss=0.04167, over 4898.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2459, pruned_loss=0.053, over 956110.78 frames. ], batch size: 36, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:29:24,329 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7425, 1.3397, 1.3885, 1.5074, 1.8804, 1.5830, 1.2979, 1.3423], + device='cuda:0'), covar=tensor([0.1358, 0.1276, 0.1733, 0.1221, 0.0679, 0.1399, 0.2090, 0.1916], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0314, 0.0353, 0.0292, 0.0332, 0.0314, 0.0304, 0.0364], + device='cuda:0'), out_proj_covar=tensor([6.3990e-05, 6.5530e-05, 7.5311e-05, 5.9655e-05, 6.9153e-05, 6.6253e-05, + 6.4175e-05, 7.7705e-05], device='cuda:0') +2023-04-27 08:29:26,096 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3716, 1.1780, 1.5309, 1.5586, 1.3062, 1.1900, 1.2316, 0.8334], + device='cuda:0'), covar=tensor([0.0521, 0.0806, 0.0413, 0.0615, 0.0749, 0.1132, 0.0582, 0.0663], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0070, 0.0068, 0.0076, 0.0097, 0.0076, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 08:29:28,450 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6661, 1.6464, 1.7374, 1.3045, 1.8472, 1.4811, 2.2971, 1.5301], + device='cuda:0'), covar=tensor([0.3692, 0.1918, 0.4878, 0.2987, 0.1651, 0.2390, 0.1511, 0.4691], + device='cuda:0'), in_proj_covar=tensor([0.0348, 0.0353, 0.0433, 0.0363, 0.0390, 0.0388, 0.0378, 0.0426], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:29:42,735 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.582e+02 1.928e+02 2.328e+02 4.353e+02, threshold=3.856e+02, percent-clipped=4.0 +2023-04-27 08:29:47,487 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88756.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:29:54,457 INFO [finetune.py:976] (0/7) Epoch 16, batch 2850, loss[loss=0.2311, simple_loss=0.2966, pruned_loss=0.08275, over 4268.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2454, pruned_loss=0.05352, over 956737.46 frames. 
], batch size: 65, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:30:14,893 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2721, 1.7928, 1.6830, 2.0664, 1.9543, 1.9964, 1.6494, 4.3327], + device='cuda:0'), covar=tensor([0.0552, 0.0751, 0.0757, 0.1122, 0.0598, 0.0587, 0.0699, 0.0096], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 08:30:20,172 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=88804.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:30:28,459 INFO [finetune.py:976] (0/7) Epoch 16, batch 2900, loss[loss=0.2284, simple_loss=0.299, pruned_loss=0.07891, over 4863.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2476, pruned_loss=0.05424, over 956152.70 frames. ], batch size: 44, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:30:40,662 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0694, 1.6066, 1.3532, 1.9465, 2.1382, 1.7986, 1.7744, 1.5185], + device='cuda:0'), covar=tensor([0.1758, 0.1421, 0.1701, 0.1716, 0.1095, 0.1920, 0.1939, 0.1982], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0312, 0.0351, 0.0290, 0.0329, 0.0312, 0.0301, 0.0362], + device='cuda:0'), out_proj_covar=tensor([6.3376e-05, 6.5060e-05, 7.4921e-05, 5.9226e-05, 6.8634e-05, 6.5726e-05, + 6.3550e-05, 7.7205e-05], device='cuda:0') +2023-04-27 08:30:41,263 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0397, 2.3692, 2.0725, 2.3328, 1.6396, 2.1448, 2.1731, 1.6281], + device='cuda:0'), covar=tensor([0.1831, 0.1197, 0.0780, 0.1153, 0.3078, 0.1060, 0.1900, 0.2378], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0312, 0.0222, 0.0282, 0.0315, 0.0264, 0.0253, 0.0268], + device='cuda:0'), out_proj_covar=tensor([1.1584e-04, 1.2432e-04, 8.8308e-05, 1.1217e-04, 1.2823e-04, 1.0502e-04, + 1.0226e-04, 1.0674e-04], device='cuda:0') +2023-04-27 08:30:50,711 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.633e+02 2.026e+02 2.312e+02 4.286e+02, threshold=4.053e+02, percent-clipped=1.0 +2023-04-27 08:30:52,858 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-27 08:31:02,492 INFO [finetune.py:976] (0/7) Epoch 16, batch 2950, loss[loss=0.184, simple_loss=0.2567, pruned_loss=0.05564, over 4890.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2508, pruned_loss=0.05543, over 955884.58 frames. ], batch size: 35, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:31:54,203 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3713, 1.6842, 1.7739, 1.8875, 1.7292, 1.8242, 1.8952, 1.8296], + device='cuda:0'), covar=tensor([0.3611, 0.5434, 0.4622, 0.4347, 0.5427, 0.6585, 0.4875, 0.4816], + device='cuda:0'), in_proj_covar=tensor([0.0330, 0.0369, 0.0316, 0.0331, 0.0342, 0.0393, 0.0351, 0.0323], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 08:31:58,222 INFO [finetune.py:976] (0/7) Epoch 16, batch 3000, loss[loss=0.1942, simple_loss=0.2728, pruned_loss=0.05777, over 4803.00 frames. ], tot_loss[loss=0.1815, simple_loss=0.2519, pruned_loss=0.05555, over 957254.49 frames. 
], batch size: 45, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:31:58,223 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 08:32:03,863 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8558, 1.7798, 1.8583, 2.2104, 2.2145, 1.8115, 1.3562, 2.0877], + device='cuda:0'), covar=tensor([0.0742, 0.1179, 0.0700, 0.0463, 0.0555, 0.0831, 0.0797, 0.0469], + device='cuda:0'), in_proj_covar=tensor([0.0191, 0.0204, 0.0185, 0.0176, 0.0179, 0.0184, 0.0157, 0.0182], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:32:06,868 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4438, 1.3113, 1.5678, 1.6211, 1.3325, 1.2161, 1.3558, 0.8632], + device='cuda:0'), covar=tensor([0.0617, 0.0693, 0.0508, 0.0634, 0.0874, 0.1233, 0.0597, 0.0719], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0070, 0.0068, 0.0076, 0.0097, 0.0076, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 08:32:14,557 INFO [finetune.py:1010] (0/7) Epoch 16, validation: loss=0.1523, simple_loss=0.2234, pruned_loss=0.04062, over 2265189.00 frames. +2023-04-27 08:32:14,557 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 08:32:47,285 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=88947.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 08:32:49,015 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.654e+02 1.995e+02 2.342e+02 3.743e+02, threshold=3.990e+02, percent-clipped=0.0 +2023-04-27 08:32:59,787 INFO [finetune.py:976] (0/7) Epoch 16, batch 3050, loss[loss=0.2615, simple_loss=0.3112, pruned_loss=0.1059, over 4729.00 frames. ], tot_loss[loss=0.1818, simple_loss=0.2523, pruned_loss=0.05568, over 955799.59 frames. ], batch size: 54, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:33:20,100 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8881, 2.3643, 0.8984, 1.2088, 1.5438, 1.1411, 2.4973, 1.3697], + device='cuda:0'), covar=tensor([0.0731, 0.0573, 0.0698, 0.1297, 0.0496, 0.1078, 0.0370, 0.0729], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0046, 0.0050, 0.0052, 0.0075, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 08:33:30,454 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=88995.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 08:33:55,682 INFO [finetune.py:976] (0/7) Epoch 16, batch 3100, loss[loss=0.1683, simple_loss=0.2441, pruned_loss=0.04622, over 4815.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2506, pruned_loss=0.05455, over 957336.77 frames. 
], batch size: 30, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:34:03,929 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9673, 1.4818, 1.8294, 1.7479, 1.8336, 1.4837, 0.9624, 1.4991], + device='cuda:0'), covar=tensor([0.3498, 0.3316, 0.1819, 0.2249, 0.2547, 0.2732, 0.3938, 0.1928], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0246, 0.0224, 0.0314, 0.0216, 0.0230, 0.0228, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 08:34:05,009 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4063, 1.0210, 0.4933, 1.1229, 1.0538, 1.3334, 1.2182, 1.1920], + device='cuda:0'), covar=tensor([0.0447, 0.0344, 0.0353, 0.0498, 0.0274, 0.0451, 0.0442, 0.0476], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0049, 0.0044, 0.0037, 0.0050, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 08:34:13,205 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89027.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:34:13,780 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6579, 3.4733, 0.9958, 1.9160, 2.0591, 2.4631, 1.9255, 0.9577], + device='cuda:0'), covar=tensor([0.1262, 0.0948, 0.1861, 0.1148, 0.0968, 0.1019, 0.1454, 0.1901], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0243, 0.0137, 0.0120, 0.0130, 0.0152, 0.0118, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 08:34:38,477 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.571e+02 1.837e+02 2.281e+02 4.804e+02, threshold=3.674e+02, percent-clipped=2.0 +2023-04-27 08:34:47,707 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8694, 1.5072, 1.9484, 2.3508, 1.9411, 1.8489, 1.9805, 1.8847], + device='cuda:0'), covar=tensor([0.5402, 0.7348, 0.6832, 0.6470, 0.6702, 0.9101, 0.9157, 0.8725], + device='cuda:0'), in_proj_covar=tensor([0.0417, 0.0407, 0.0493, 0.0507, 0.0447, 0.0472, 0.0477, 0.0480], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:34:59,022 INFO [finetune.py:976] (0/7) Epoch 16, batch 3150, loss[loss=0.2193, simple_loss=0.2852, pruned_loss=0.07671, over 4793.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2495, pruned_loss=0.0547, over 957018.30 frames. ], batch size: 29, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:35:14,722 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89088.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:35:37,803 INFO [finetune.py:976] (0/7) Epoch 16, batch 3200, loss[loss=0.2048, simple_loss=0.2707, pruned_loss=0.06943, over 4348.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2449, pruned_loss=0.05319, over 955459.13 frames. 
], batch size: 65, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:36:02,728 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5332, 1.1104, 1.2666, 1.1820, 1.6456, 1.3713, 1.1204, 1.2201], + device='cuda:0'), covar=tensor([0.1614, 0.1570, 0.2211, 0.1603, 0.0996, 0.1609, 0.2070, 0.2627], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0314, 0.0354, 0.0292, 0.0331, 0.0314, 0.0304, 0.0366], + device='cuda:0'), out_proj_covar=tensor([6.4169e-05, 6.5512e-05, 7.5630e-05, 5.9553e-05, 6.9021e-05, 6.6199e-05, + 6.4158e-05, 7.8139e-05], device='cuda:0') +2023-04-27 08:36:23,960 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.556e+02 1.903e+02 2.206e+02 4.255e+02, threshold=3.807e+02, percent-clipped=1.0 +2023-04-27 08:36:31,253 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6164, 1.1692, 4.2259, 3.9458, 3.7399, 3.9761, 3.8843, 3.7227], + device='cuda:0'), covar=tensor([0.7379, 0.6174, 0.1013, 0.1790, 0.1081, 0.1753, 0.2117, 0.1686], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0300, 0.0397, 0.0400, 0.0343, 0.0402, 0.0305, 0.0358], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 08:36:44,944 INFO [finetune.py:976] (0/7) Epoch 16, batch 3250, loss[loss=0.2136, simple_loss=0.2807, pruned_loss=0.07326, over 4916.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2468, pruned_loss=0.05377, over 954930.67 frames. ], batch size: 36, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:36:46,899 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89169.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:37:46,006 INFO [finetune.py:976] (0/7) Epoch 16, batch 3300, loss[loss=0.2225, simple_loss=0.294, pruned_loss=0.07547, over 4175.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2518, pruned_loss=0.05525, over 955115.70 frames. ], batch size: 65, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:38:07,652 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89230.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:38:12,866 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.62 vs. limit=2.0 +2023-04-27 08:38:17,539 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2086, 2.4232, 1.1006, 1.3974, 1.8757, 1.3071, 3.0411, 1.7194], + device='cuda:0'), covar=tensor([0.0560, 0.0636, 0.0722, 0.1171, 0.0455, 0.0944, 0.0311, 0.0648], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0046, 0.0050, 0.0052, 0.0075, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 08:38:21,745 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.219e+02 1.680e+02 1.944e+02 2.414e+02 4.518e+02, threshold=3.887e+02, percent-clipped=2.0 +2023-04-27 08:38:43,925 INFO [finetune.py:976] (0/7) Epoch 16, batch 3350, loss[loss=0.196, simple_loss=0.2739, pruned_loss=0.05906, over 4911.00 frames. ], tot_loss[loss=0.1832, simple_loss=0.2536, pruned_loss=0.05635, over 951824.50 frames. ], batch size: 36, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:39:17,175 INFO [finetune.py:976] (0/7) Epoch 16, batch 3400, loss[loss=0.1613, simple_loss=0.2352, pruned_loss=0.0437, over 4779.00 frames. ], tot_loss[loss=0.1845, simple_loss=0.2545, pruned_loss=0.05722, over 948122.80 frames. 
], batch size: 26, lr: 3.44e-03, grad_scale: 32.0 +2023-04-27 08:39:40,164 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.554e+02 1.949e+02 2.299e+02 3.513e+02, threshold=3.898e+02, percent-clipped=0.0 +2023-04-27 08:39:53,662 INFO [finetune.py:976] (0/7) Epoch 16, batch 3450, loss[loss=0.1638, simple_loss=0.2415, pruned_loss=0.04305, over 4919.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2538, pruned_loss=0.05658, over 949843.08 frames. ], batch size: 38, lr: 3.43e-03, grad_scale: 32.0 +2023-04-27 08:40:04,306 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89374.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:40:16,185 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89383.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:40:42,498 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-27 08:40:49,833 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-27 08:40:54,851 INFO [finetune.py:976] (0/7) Epoch 16, batch 3500, loss[loss=0.1955, simple_loss=0.2629, pruned_loss=0.06406, over 4928.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2508, pruned_loss=0.05558, over 950957.62 frames. ], batch size: 38, lr: 3.43e-03, grad_scale: 32.0 +2023-04-27 08:41:00,461 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89425.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:41:07,505 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89435.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:41:09,965 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89439.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 08:41:18,074 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.586e+02 1.877e+02 2.284e+02 3.930e+02, threshold=3.755e+02, percent-clipped=1.0 +2023-04-27 08:41:21,298 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4450, 1.7374, 1.7712, 1.9696, 1.7882, 1.9065, 1.8895, 1.8406], + device='cuda:0'), covar=tensor([0.4207, 0.6251, 0.4967, 0.4834, 0.6146, 0.7725, 0.6214, 0.5675], + device='cuda:0'), in_proj_covar=tensor([0.0334, 0.0375, 0.0319, 0.0335, 0.0346, 0.0397, 0.0355, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 08:41:34,093 INFO [finetune.py:976] (0/7) Epoch 16, batch 3550, loss[loss=0.1699, simple_loss=0.2336, pruned_loss=0.05308, over 4794.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2479, pruned_loss=0.05472, over 953528.61 frames. ], batch size: 29, lr: 3.43e-03, grad_scale: 32.0 +2023-04-27 08:41:43,441 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-27 08:41:58,275 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89486.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:42:08,154 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89500.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 08:42:20,185 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89509.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:42:29,609 INFO [finetune.py:976] (0/7) Epoch 16, batch 3600, loss[loss=0.1811, simple_loss=0.2568, pruned_loss=0.05271, over 4932.00 frames. 
], tot_loss[loss=0.1772, simple_loss=0.2461, pruned_loss=0.05412, over 955071.89 frames. ], batch size: 42, lr: 3.43e-03, grad_scale: 32.0 +2023-04-27 08:42:41,025 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89525.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:43:13,812 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.211e+02 1.668e+02 1.907e+02 2.305e+02 5.911e+02, threshold=3.815e+02, percent-clipped=3.0 +2023-04-27 08:43:36,162 INFO [finetune.py:976] (0/7) Epoch 16, batch 3650, loss[loss=0.1954, simple_loss=0.2619, pruned_loss=0.0644, over 4830.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2491, pruned_loss=0.056, over 953583.73 frames. ], batch size: 30, lr: 3.43e-03, grad_scale: 32.0 +2023-04-27 08:43:38,718 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89570.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:44:11,451 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3986, 1.1974, 1.5918, 1.5683, 1.3152, 1.1675, 1.2976, 0.9209], + device='cuda:0'), covar=tensor([0.0579, 0.0726, 0.0417, 0.0604, 0.0764, 0.1385, 0.0621, 0.0693], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0071, 0.0070, 0.0068, 0.0076, 0.0097, 0.0076, 0.0069], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 08:44:30,811 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4954, 3.0367, 0.7676, 1.5997, 1.8119, 2.1975, 1.7688, 0.9594], + device='cuda:0'), covar=tensor([0.1376, 0.0984, 0.2116, 0.1357, 0.1092, 0.1016, 0.1676, 0.1811], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0247, 0.0139, 0.0122, 0.0132, 0.0154, 0.0119, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 08:44:42,832 INFO [finetune.py:976] (0/7) Epoch 16, batch 3700, loss[loss=0.1728, simple_loss=0.244, pruned_loss=0.05077, over 4933.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2522, pruned_loss=0.05633, over 954842.87 frames. ], batch size: 33, lr: 3.43e-03, grad_scale: 32.0 +2023-04-27 08:45:26,134 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.724e+02 2.041e+02 2.428e+02 5.061e+02, threshold=4.082e+02, percent-clipped=2.0 +2023-04-27 08:45:47,838 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-27 08:45:48,268 INFO [finetune.py:976] (0/7) Epoch 16, batch 3750, loss[loss=0.1754, simple_loss=0.2369, pruned_loss=0.05694, over 4731.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2526, pruned_loss=0.05645, over 952915.50 frames. 
], batch size: 23, lr: 3.43e-03, grad_scale: 32.0 +2023-04-27 08:46:02,526 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89680.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:46:10,295 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89683.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:46:41,064 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8044, 2.3615, 2.0233, 2.2093, 1.6683, 1.9839, 1.9451, 1.5363], + device='cuda:0'), covar=tensor([0.1932, 0.1241, 0.0798, 0.1071, 0.3490, 0.1161, 0.1724, 0.2624], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0310, 0.0222, 0.0281, 0.0313, 0.0263, 0.0252, 0.0267], + device='cuda:0'), out_proj_covar=tensor([1.1536e-04, 1.2360e-04, 8.8345e-05, 1.1172e-04, 1.2746e-04, 1.0486e-04, + 1.0182e-04, 1.0627e-04], device='cuda:0') +2023-04-27 08:46:49,772 INFO [finetune.py:976] (0/7) Epoch 16, batch 3800, loss[loss=0.1957, simple_loss=0.2716, pruned_loss=0.05985, over 4832.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2528, pruned_loss=0.05639, over 952457.91 frames. ], batch size: 49, lr: 3.43e-03, grad_scale: 32.0 +2023-04-27 08:47:09,436 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89730.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:47:10,021 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=89731.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:47:21,750 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89741.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:47:31,967 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.563e+02 1.915e+02 2.320e+02 3.976e+02, threshold=3.830e+02, percent-clipped=0.0 +2023-04-27 08:47:43,648 INFO [finetune.py:976] (0/7) Epoch 16, batch 3850, loss[loss=0.1756, simple_loss=0.2362, pruned_loss=0.05752, over 4131.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2512, pruned_loss=0.0557, over 952602.96 frames. ], batch size: 18, lr: 3.43e-03, grad_scale: 32.0 +2023-04-27 08:47:43,750 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.0809, 3.4576, 1.4633, 2.2836, 2.7984, 2.4892, 4.6771, 2.7845], + device='cuda:0'), covar=tensor([0.0516, 0.0705, 0.0723, 0.1081, 0.0404, 0.0684, 0.0293, 0.0425], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0049, 0.0052, 0.0074, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 08:47:48,608 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 08:47:58,291 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89781.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 08:48:12,398 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89795.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 08:48:12,579 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 08:48:26,725 INFO [finetune.py:976] (0/7) Epoch 16, batch 3900, loss[loss=0.174, simple_loss=0.2411, pruned_loss=0.05347, over 4925.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2496, pruned_loss=0.05603, over 953432.72 frames. 
], batch size: 38, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:48:33,313 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=89825.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:48:48,871 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.776e+01 1.757e+02 2.140e+02 2.503e+02 6.967e+02, threshold=4.281e+02, percent-clipped=3.0
+2023-04-27 08:48:59,079 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=89865.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:48:59,114 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=89865.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:48:59,621 INFO [finetune.py:976] (0/7) Epoch 16, batch 3950, loss[loss=0.1493, simple_loss=0.2217, pruned_loss=0.03848, over 4819.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2459, pruned_loss=0.05464, over 953781.90 frames. ], batch size: 30, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:49:05,391 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=89873.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:49:33,476 INFO [finetune.py:976] (0/7) Epoch 16, batch 4000, loss[loss=0.1688, simple_loss=0.221, pruned_loss=0.05833, over 4034.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2456, pruned_loss=0.05448, over 953112.20 frames. ], batch size: 17, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:49:34,756 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.2218, 3.1945, 2.5152, 3.8081, 3.2382, 3.2448, 1.3909, 3.2683],
+ device='cuda:0'), covar=tensor([0.1898, 0.1424, 0.3786, 0.2265, 0.3495, 0.2129, 0.5827, 0.2948],
+ device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0215, 0.0253, 0.0305, 0.0301, 0.0251, 0.0275, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 08:49:41,170 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=89926.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 08:50:03,077 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1230, 2.4068, 1.0962, 1.4127, 1.9162, 1.3778, 2.9852, 1.6965],
+ device='cuda:0'), covar=tensor([0.0571, 0.0595, 0.0684, 0.1158, 0.0412, 0.0875, 0.0264, 0.0549],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0075, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 08:50:10,169 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7879, 1.2914, 1.8221, 2.2662, 1.8873, 1.7937, 1.8632, 1.8119],
+ device='cuda:0'), covar=tensor([0.4728, 0.6819, 0.6539, 0.5855, 0.6176, 0.7861, 0.8070, 0.8380],
+ device='cuda:0'), in_proj_covar=tensor([0.0418, 0.0406, 0.0492, 0.0506, 0.0446, 0.0471, 0.0475, 0.0479],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 08:50:11,839 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.112e+02 1.722e+02 2.012e+02 2.405e+02 4.800e+02, threshold=4.024e+02, percent-clipped=2.0
+2023-04-27 08:50:33,227 INFO [finetune.py:976] (0/7) Epoch 16, batch 4050, loss[loss=0.1363, simple_loss=0.1964, pruned_loss=0.0381, over 4147.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2491, pruned_loss=0.05517, over 952910.83 frames. ], batch size: 17, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:51:08,411 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1998, 1.5780, 2.0359, 2.4499, 2.0542, 1.6939, 1.4877, 1.7914],
+ device='cuda:0'), covar=tensor([0.3510, 0.3721, 0.1838, 0.2536, 0.2975, 0.2747, 0.4124, 0.2123],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0248, 0.0226, 0.0316, 0.0218, 0.0231, 0.0229, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 08:51:18,261 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-90000.pt
+2023-04-27 08:51:40,301 INFO [finetune.py:976] (0/7) Epoch 16, batch 4100, loss[loss=0.1795, simple_loss=0.2525, pruned_loss=0.05319, over 4824.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2523, pruned_loss=0.05622, over 952049.94 frames. ], batch size: 33, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:52:02,658 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90030.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:52:02,686 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8915, 1.7990, 1.6507, 1.4228, 1.8246, 1.5289, 2.1894, 1.4827],
+ device='cuda:0'), covar=tensor([0.3064, 0.1519, 0.3888, 0.2404, 0.1507, 0.1854, 0.1410, 0.3735],
+ device='cuda:0'), in_proj_covar=tensor([0.0347, 0.0350, 0.0434, 0.0359, 0.0388, 0.0386, 0.0376, 0.0424],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 08:52:12,520 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90036.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:52:13,136 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7294, 3.6636, 2.8005, 4.2767, 3.6886, 3.7266, 1.4150, 3.6331],
+ device='cuda:0'), covar=tensor([0.1871, 0.1089, 0.2987, 0.1778, 0.3441, 0.1820, 0.6505, 0.2526],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0213, 0.0252, 0.0303, 0.0298, 0.0249, 0.0272, 0.0273],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 08:52:15,537 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7341, 1.2843, 4.6457, 4.3898, 4.1075, 4.3871, 4.1129, 4.0531],
+ device='cuda:0'), covar=tensor([0.7630, 0.5786, 0.1067, 0.1674, 0.1109, 0.1721, 0.2259, 0.1618],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0301, 0.0400, 0.0399, 0.0345, 0.0405, 0.0307, 0.0360],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 08:52:27,090 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.746e+02 2.004e+02 2.346e+02 5.282e+02, threshold=4.007e+02, percent-clipped=2.0
+2023-04-27 08:52:44,837 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.75 vs. limit=5.0
+2023-04-27 08:52:48,659 INFO [finetune.py:976] (0/7) Epoch 16, batch 4150, loss[loss=0.2055, simple_loss=0.2811, pruned_loss=0.06491, over 4729.00 frames. ], tot_loss[loss=0.1835, simple_loss=0.2538, pruned_loss=0.05662, over 952120.95 frames. ], batch size: 59, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:53:06,915 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=90078.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:53:09,295 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90081.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:53:26,025 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90095.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 08:53:38,040 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3567, 1.6627, 1.7320, 1.8902, 1.7039, 1.8087, 1.7816, 1.7631],
+ device='cuda:0'), covar=tensor([0.4563, 0.6401, 0.5193, 0.5026, 0.6385, 0.8213, 0.6340, 0.5457],
+ device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0373, 0.0318, 0.0333, 0.0345, 0.0396, 0.0354, 0.0324],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 08:53:50,593 INFO [finetune.py:976] (0/7) Epoch 16, batch 4200, loss[loss=0.1999, simple_loss=0.2676, pruned_loss=0.06606, over 4905.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2536, pruned_loss=0.05614, over 951357.98 frames. ], batch size: 36, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:53:59,894 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=90129.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:54:00,557 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90130.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:54:14,873 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=90143.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 08:54:24,333 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.851e+01 1.635e+02 1.941e+02 2.339e+02 3.883e+02, threshold=3.882e+02, percent-clipped=0.0
+2023-04-27 08:54:34,083 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90165.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:54:34,610 INFO [finetune.py:976] (0/7) Epoch 16, batch 4250, loss[loss=0.1609, simple_loss=0.2217, pruned_loss=0.05009, over 4844.00 frames. ], tot_loss[loss=0.1809, simple_loss=0.2511, pruned_loss=0.05536, over 953451.65 frames. ], batch size: 49, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:54:52,905 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90191.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:55:06,202 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=90213.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:55:08,446 INFO [finetune.py:976] (0/7) Epoch 16, batch 4300, loss[loss=0.1381, simple_loss=0.199, pruned_loss=0.03859, over 4810.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2495, pruned_loss=0.05498, over 955339.88 frames. ], batch size: 25, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:55:11,519 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90221.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 08:55:11,697 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0
+2023-04-27 08:55:19,269 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90232.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:55:29,816 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5512, 3.2485, 0.9229, 1.7835, 1.8870, 2.3150, 1.8484, 0.9747],
+ device='cuda:0'), covar=tensor([0.1282, 0.0850, 0.1941, 0.1214, 0.0968, 0.0930, 0.1456, 0.1927],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0245, 0.0138, 0.0121, 0.0132, 0.0154, 0.0118, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 08:55:29,873 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1273, 1.6292, 2.0167, 2.4539, 1.9392, 1.5987, 1.2769, 1.7667],
+ device='cuda:0'), covar=tensor([0.3207, 0.3020, 0.1609, 0.2054, 0.2571, 0.2645, 0.4042, 0.1985],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0248, 0.0226, 0.0316, 0.0218, 0.0230, 0.0228, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 08:55:32,135 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.571e+02 1.935e+02 2.216e+02 3.879e+02, threshold=3.870e+02, percent-clipped=0.0
+2023-04-27 08:55:41,846 INFO [finetune.py:976] (0/7) Epoch 16, batch 4350, loss[loss=0.1732, simple_loss=0.2393, pruned_loss=0.05357, over 4846.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2466, pruned_loss=0.05406, over 957597.41 frames. ], batch size: 49, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:55:56,673 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0
+2023-04-27 08:56:00,608 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90293.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:56:13,361 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0
+2023-04-27 08:56:15,530 INFO [finetune.py:976] (0/7) Epoch 16, batch 4400, loss[loss=0.1957, simple_loss=0.273, pruned_loss=0.05916, over 4900.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2481, pruned_loss=0.05532, over 956610.96 frames. ], batch size: 43, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:56:34,470 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90336.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:56:55,340 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.655e+02 1.875e+02 2.353e+02 3.798e+02, threshold=3.751e+02, percent-clipped=0.0
+2023-04-27 08:56:56,099 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90351.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:57:10,225 INFO [finetune.py:976] (0/7) Epoch 16, batch 4450, loss[loss=0.1693, simple_loss=0.241, pruned_loss=0.04883, over 4755.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2507, pruned_loss=0.05562, over 956100.15 frames. ], batch size: 28, lr: 3.43e-03, grad_scale: 32.0
+2023-04-27 08:57:31,493 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-04-27 08:57:33,653 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=90384.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:57:33,810 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-27 08:57:40,812 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7833, 1.2735, 1.3694, 1.4734, 1.9091, 1.4662, 1.2048, 1.3342],
+ device='cuda:0'), covar=tensor([0.1442, 0.1498, 0.1785, 0.1337, 0.0829, 0.1506, 0.2128, 0.2035],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0311, 0.0351, 0.0290, 0.0327, 0.0311, 0.0301, 0.0365],
+ device='cuda:0'), out_proj_covar=tensor([6.3299e-05, 6.4896e-05, 7.4932e-05, 5.9149e-05, 6.8162e-05, 6.5438e-05,
+ 6.3656e-05, 7.7942e-05], device='cuda:0')
+2023-04-27 08:57:56,359 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7213, 2.4938, 1.9997, 2.2766, 2.4080, 2.0666, 2.9166, 1.7295],
+ device='cuda:0'), covar=tensor([0.3413, 0.1784, 0.3727, 0.2768, 0.1722, 0.2510, 0.1983, 0.3918],
+ device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0349, 0.0433, 0.0357, 0.0387, 0.0385, 0.0373, 0.0423],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 08:57:59,390 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90412.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:58:07,139 INFO [finetune.py:976] (0/7) Epoch 16, batch 4500, loss[loss=0.2117, simple_loss=0.2877, pruned_loss=0.06782, over 4913.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2524, pruned_loss=0.05612, over 955274.67 frames. ], batch size: 37, lr: 3.43e-03, grad_scale: 64.0
+2023-04-27 08:58:09,655 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6765, 3.5217, 0.7972, 1.8204, 2.0080, 2.4701, 1.9990, 1.0395],
+ device='cuda:0'), covar=tensor([0.1364, 0.0956, 0.2171, 0.1304, 0.1019, 0.1026, 0.1582, 0.1880],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0245, 0.0138, 0.0121, 0.0132, 0.0154, 0.0118, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 08:58:52,081 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.589e+02 1.862e+02 2.271e+02 3.876e+02, threshold=3.723e+02, percent-clipped=1.0
+2023-04-27 08:59:04,454 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90459.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:59:05,059 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2792, 1.5719, 1.3634, 1.8736, 1.7409, 1.9412, 1.4412, 3.6348],
+ device='cuda:0'), covar=tensor([0.0628, 0.0781, 0.0857, 0.1180, 0.0626, 0.0467, 0.0731, 0.0151],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0056],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 08:59:14,179 INFO [finetune.py:976] (0/7) Epoch 16, batch 4550, loss[loss=0.1807, simple_loss=0.2491, pruned_loss=0.05617, over 4688.00 frames. ], tot_loss[loss=0.1828, simple_loss=0.2533, pruned_loss=0.05616, over 954543.18 frames. ], batch size: 59, lr: 3.43e-03, grad_scale: 64.0
+2023-04-27 08:59:14,874 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6690, 1.3657, 4.4658, 4.1988, 3.8908, 4.2460, 4.0868, 3.8970],
+ device='cuda:0'), covar=tensor([0.6776, 0.5705, 0.0936, 0.1397, 0.1126, 0.1430, 0.1517, 0.1676],
+ device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0306, 0.0402, 0.0403, 0.0349, 0.0410, 0.0309, 0.0365],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 08:59:37,649 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90486.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 08:59:55,483 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90496.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:00:15,724 INFO [finetune.py:976] (0/7) Epoch 16, batch 4600, loss[loss=0.1951, simple_loss=0.2684, pruned_loss=0.06094, over 4869.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2515, pruned_loss=0.05496, over 953048.89 frames. ], batch size: 34, lr: 3.43e-03, grad_scale: 64.0
+2023-04-27 09:00:18,324 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90520.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:00:18,915 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90521.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 09:00:37,739 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.675e+02 1.952e+02 2.329e+02 6.121e+02, threshold=3.905e+02, percent-clipped=3.0
+2023-04-27 09:00:43,127 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90557.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:00:49,345 INFO [finetune.py:976] (0/7) Epoch 16, batch 4650, loss[loss=0.1779, simple_loss=0.2442, pruned_loss=0.0558, over 4920.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.2488, pruned_loss=0.05451, over 953671.69 frames. ], batch size: 37, lr: 3.42e-03, grad_scale: 64.0
+2023-04-27 09:00:51,259 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=90569.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:01:03,220 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90588.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:01:23,250 INFO [finetune.py:976] (0/7) Epoch 16, batch 4700, loss[loss=0.1371, simple_loss=0.2037, pruned_loss=0.03531, over 4920.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2457, pruned_loss=0.05333, over 955108.65 frames. ], batch size: 43, lr: 3.42e-03, grad_scale: 64.0
+2023-04-27 09:01:45,413 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.002e+02 1.583e+02 1.845e+02 2.338e+02 5.124e+02, threshold=3.690e+02, percent-clipped=1.0
+2023-04-27 09:02:01,944 INFO [finetune.py:976] (0/7) Epoch 16, batch 4750, loss[loss=0.1811, simple_loss=0.258, pruned_loss=0.05205, over 4915.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2452, pruned_loss=0.05317, over 957024.22 frames. ], batch size: 46, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:02:51,138 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90707.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:03:07,873 INFO [finetune.py:976] (0/7) Epoch 16, batch 4800, loss[loss=0.2456, simple_loss=0.3115, pruned_loss=0.08989, over 4831.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2478, pruned_loss=0.05412, over 956172.90 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:03:36,199 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.106e+02 1.708e+02 2.005e+02 2.486e+02 4.014e+02, threshold=4.009e+02, percent-clipped=3.0
+2023-04-27 09:03:47,213 INFO [finetune.py:976] (0/7) Epoch 16, batch 4850, loss[loss=0.186, simple_loss=0.2525, pruned_loss=0.0597, over 4905.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2507, pruned_loss=0.05461, over 956200.11 frames. ], batch size: 43, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:04:00,346 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90786.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:04:07,055 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7760, 3.5229, 0.8717, 1.9327, 2.0842, 2.5497, 2.0246, 1.0030],
+ device='cuda:0'), covar=tensor([0.1308, 0.0931, 0.2019, 0.1242, 0.1046, 0.0994, 0.1600, 0.2093],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0244, 0.0138, 0.0121, 0.0132, 0.0154, 0.0119, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 09:04:36,228 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90815.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:04:36,789 INFO [finetune.py:976] (0/7) Epoch 16, batch 4900, loss[loss=0.1843, simple_loss=0.2561, pruned_loss=0.05626, over 4775.00 frames. ], tot_loss[loss=0.182, simple_loss=0.253, pruned_loss=0.05554, over 956486.94 frames. ], batch size: 25, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:04:58,779 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-27 09:05:00,658 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0
+2023-04-27 09:05:01,131 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=90834.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:05:03,041 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0519, 2.5125, 1.0933, 1.3495, 1.9177, 1.3113, 3.1172, 1.6693],
+ device='cuda:0'), covar=tensor([0.0676, 0.0585, 0.0711, 0.1279, 0.0430, 0.0971, 0.0275, 0.0618],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0046, 0.0049, 0.0052, 0.0075, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 09:05:12,156 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5721, 1.7917, 0.8329, 1.3003, 1.7006, 1.4353, 1.3362, 1.4246],
+ device='cuda:0'), covar=tensor([0.0521, 0.0356, 0.0372, 0.0585, 0.0304, 0.0531, 0.0547, 0.0582],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 09:05:20,311 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0
+2023-04-27 09:05:21,463 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=90849.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:05:22,604 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.643e+02 1.978e+02 2.332e+02 3.633e+02, threshold=3.955e+02, percent-clipped=0.0
+2023-04-27 09:05:23,822 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=90852.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:05:36,563 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2851, 1.5087, 1.6855, 1.7920, 1.6809, 1.7756, 1.7228, 1.7361],
+ device='cuda:0'), covar=tensor([0.4272, 0.6148, 0.4792, 0.4829, 0.6036, 0.7955, 0.5645, 0.5357],
+ device='cuda:0'), in_proj_covar=tensor([0.0332, 0.0374, 0.0318, 0.0334, 0.0345, 0.0397, 0.0354, 0.0325],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 09:05:38,748 INFO [finetune.py:976] (0/7) Epoch 16, batch 4950, loss[loss=0.1855, simple_loss=0.2623, pruned_loss=0.05432, over 4894.00 frames. ], tot_loss[loss=0.1824, simple_loss=0.2538, pruned_loss=0.05549, over 956829.40 frames. ], batch size: 36, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:05:54,239 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=90888.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:06:08,381 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=90910.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:06:12,378 INFO [finetune.py:976] (0/7) Epoch 16, batch 5000, loss[loss=0.1926, simple_loss=0.2686, pruned_loss=0.05824, over 4891.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2521, pruned_loss=0.055, over 957548.68 frames. ], batch size: 35, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:06:27,004 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=90936.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:06:36,101 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.563e+02 1.794e+02 2.191e+02 3.401e+02, threshold=3.587e+02, percent-clipped=0.0
+2023-04-27 09:06:46,205 INFO [finetune.py:976] (0/7) Epoch 16, batch 5050, loss[loss=0.141, simple_loss=0.2183, pruned_loss=0.03181, over 4827.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2497, pruned_loss=0.05507, over 956851.59 frames. ], batch size: 51, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:07:13,894 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91007.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:07:19,801 INFO [finetune.py:976] (0/7) Epoch 16, batch 5100, loss[loss=0.1334, simple_loss=0.1933, pruned_loss=0.03672, over 4207.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2466, pruned_loss=0.05366, over 954843.81 frames. ], batch size: 18, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:07:22,208 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0613, 1.4344, 1.6124, 1.7243, 2.1624, 1.7727, 1.5062, 1.5165],
+ device='cuda:0'), covar=tensor([0.1436, 0.1465, 0.2006, 0.1323, 0.0912, 0.1449, 0.2041, 0.2240],
+ device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0307, 0.0347, 0.0285, 0.0323, 0.0308, 0.0298, 0.0360],
+ device='cuda:0'), out_proj_covar=tensor([6.2427e-05, 6.4083e-05, 7.3996e-05, 5.8008e-05, 6.7258e-05, 6.4901e-05,
+ 6.3081e-05, 7.6835e-05], device='cuda:0')
+2023-04-27 09:07:43,899 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.518e+02 1.785e+02 2.206e+02 4.900e+02, threshold=3.570e+02, percent-clipped=2.0
+2023-04-27 09:07:46,424 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=91055.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:07:51,974 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3912, 1.9940, 2.3462, 2.7196, 2.7127, 2.1830, 1.9416, 2.4376],
+ device='cuda:0'), covar=tensor([0.0814, 0.1034, 0.0587, 0.0569, 0.0526, 0.0877, 0.0819, 0.0532],
+ device='cuda:0'), in_proj_covar=tensor([0.0190, 0.0202, 0.0182, 0.0173, 0.0178, 0.0182, 0.0153, 0.0181],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:07:53,574 INFO [finetune.py:976] (0/7) Epoch 16, batch 5150, loss[loss=0.2295, simple_loss=0.3035, pruned_loss=0.07777, over 4866.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2466, pruned_loss=0.05344, over 956174.77 frames. ], batch size: 44, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:07:53,785 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.87 vs. limit=5.0
+2023-04-27 09:08:33,724 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-04-27 09:08:48,476 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91115.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:08:48,975 INFO [finetune.py:976] (0/7) Epoch 16, batch 5200, loss[loss=0.1637, simple_loss=0.2371, pruned_loss=0.04511, over 4734.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2504, pruned_loss=0.05481, over 955785.54 frames. ], batch size: 27, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:09:17,735 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8798, 2.4113, 1.1108, 1.2708, 1.7475, 1.2298, 2.8855, 1.4269],
+ device='cuda:0'), covar=tensor([0.0753, 0.0664, 0.0788, 0.1390, 0.0495, 0.1039, 0.0285, 0.0702],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0046, 0.0050, 0.0052, 0.0075, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 09:09:38,601 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.048e+02 1.654e+02 2.065e+02 2.493e+02 5.806e+02, threshold=4.130e+02, percent-clipped=4.0
+2023-04-27 09:09:39,307 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91152.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:09:49,125 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-04-27 09:09:51,478 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=91163.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:09:53,288 INFO [finetune.py:976] (0/7) Epoch 16, batch 5250, loss[loss=0.2251, simple_loss=0.284, pruned_loss=0.08306, over 4906.00 frames. ], tot_loss[loss=0.181, simple_loss=0.252, pruned_loss=0.055, over 956419.65 frames. ], batch size: 36, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:10:21,396 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7917, 3.8052, 2.8902, 4.4290, 3.8932, 3.7911, 1.7671, 3.7619],
+ device='cuda:0'), covar=tensor([0.1793, 0.1128, 0.2835, 0.1380, 0.2949, 0.1637, 0.5623, 0.2427],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0210, 0.0248, 0.0301, 0.0293, 0.0245, 0.0269, 0.0268],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 09:10:44,543 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=91200.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:10:47,639 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91205.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:11:00,424 INFO [finetune.py:976] (0/7) Epoch 16, batch 5300, loss[loss=0.1611, simple_loss=0.2454, pruned_loss=0.03843, over 4886.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2519, pruned_loss=0.0549, over 955224.53 frames. ], batch size: 32, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:11:13,769 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0142, 1.5869, 1.8916, 2.3223, 2.3086, 1.7398, 1.5935, 2.0003],
+ device='cuda:0'), covar=tensor([0.0857, 0.1286, 0.0820, 0.0620, 0.0631, 0.1041, 0.0837, 0.0695],
+ device='cuda:0'), in_proj_covar=tensor([0.0190, 0.0203, 0.0183, 0.0174, 0.0178, 0.0183, 0.0154, 0.0181],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:11:17,346 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7268, 1.5650, 2.0898, 2.1166, 1.5448, 1.4826, 1.7290, 1.2060],
+ device='cuda:0'), covar=tensor([0.0557, 0.0741, 0.0388, 0.0648, 0.0813, 0.1143, 0.0604, 0.0691],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0070, 0.0069, 0.0068, 0.0075, 0.0096, 0.0075, 0.0067],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 09:11:25,281 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.661e+02 1.899e+02 2.252e+02 3.532e+02, threshold=3.799e+02, percent-clipped=0.0
+2023-04-27 09:11:34,433 INFO [finetune.py:976] (0/7) Epoch 16, batch 5350, loss[loss=0.1638, simple_loss=0.234, pruned_loss=0.04679, over 4181.00 frames. ], tot_loss[loss=0.1805, simple_loss=0.252, pruned_loss=0.05448, over 955325.92 frames. ], batch size: 65, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:11:47,122 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91285.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:11:47,803 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2017, 1.4116, 1.6649, 1.7732, 1.6603, 1.8109, 1.7199, 1.7081],
+ device='cuda:0'), covar=tensor([0.3836, 0.5186, 0.4388, 0.4341, 0.5346, 0.7157, 0.4980, 0.4643],
+ device='cuda:0'), in_proj_covar=tensor([0.0333, 0.0374, 0.0319, 0.0334, 0.0345, 0.0396, 0.0354, 0.0326],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 09:12:05,947 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1763, 1.3227, 3.8100, 3.5371, 3.3608, 3.6342, 3.5785, 3.3268],
+ device='cuda:0'), covar=tensor([0.7324, 0.5524, 0.1035, 0.1779, 0.1157, 0.1603, 0.1763, 0.1422],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0302, 0.0398, 0.0400, 0.0344, 0.0405, 0.0307, 0.0361],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:12:06,733 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.79 vs. limit=5.0
+2023-04-27 09:12:08,304 INFO [finetune.py:976] (0/7) Epoch 16, batch 5400, loss[loss=0.1734, simple_loss=0.2476, pruned_loss=0.04963, over 4828.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2504, pruned_loss=0.05396, over 955535.47 frames. ], batch size: 33, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:12:28,669 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91346.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:12:31,554 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2391, 1.4054, 3.7509, 3.5094, 3.3419, 3.5166, 3.4737, 3.2584],
+ device='cuda:0'), covar=tensor([0.7052, 0.5161, 0.1103, 0.1820, 0.1133, 0.1875, 0.2014, 0.1550],
+ device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0302, 0.0398, 0.0400, 0.0344, 0.0405, 0.0307, 0.0361],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:12:32,094 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.605e+02 2.003e+02 2.320e+02 6.253e+02, threshold=4.007e+02, percent-clipped=1.0
+2023-04-27 09:12:42,142 INFO [finetune.py:976] (0/7) Epoch 16, batch 5450, loss[loss=0.1924, simple_loss=0.2469, pruned_loss=0.06893, over 4826.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.247, pruned_loss=0.05294, over 956375.44 frames. ], batch size: 33, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:13:02,591 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7196, 1.4712, 1.9193, 1.8929, 1.5098, 1.4215, 1.5187, 1.0332],
+ device='cuda:0'), covar=tensor([0.0490, 0.0993, 0.0479, 0.0700, 0.0818, 0.1066, 0.0740, 0.0698],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0069, 0.0068, 0.0076, 0.0097, 0.0075, 0.0068],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 09:13:15,970 INFO [finetune.py:976] (0/7) Epoch 16, batch 5500, loss[loss=0.1952, simple_loss=0.2664, pruned_loss=0.06201, over 4161.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2443, pruned_loss=0.05258, over 952400.21 frames. ], batch size: 65, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:13:35,345 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8183, 2.2476, 0.9211, 1.2554, 1.4802, 1.1768, 2.5121, 1.3954],
+ device='cuda:0'), covar=tensor([0.0737, 0.0573, 0.0636, 0.1200, 0.0500, 0.1029, 0.0295, 0.0677],
+ device='cuda:0'), in_proj_covar=tensor([0.0052, 0.0067, 0.0049, 0.0047, 0.0050, 0.0053, 0.0076, 0.0052],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 09:13:38,281 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.525e+01 1.617e+02 1.882e+02 2.275e+02 4.330e+02, threshold=3.765e+02, percent-clipped=1.0
+2023-04-27 09:13:49,958 INFO [finetune.py:976] (0/7) Epoch 16, batch 5550, loss[loss=0.211, simple_loss=0.295, pruned_loss=0.06353, over 4852.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2476, pruned_loss=0.05446, over 952407.83 frames. ], batch size: 47, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:14:14,695 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91505.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:14:32,823 INFO [finetune.py:976] (0/7) Epoch 16, batch 5600, loss[loss=0.2083, simple_loss=0.2883, pruned_loss=0.06412, over 4816.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2523, pruned_loss=0.05648, over 953791.29 frames. ], batch size: 41, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:15:04,815 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91542.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 09:15:04,854 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2339, 1.7324, 2.1146, 2.4372, 2.1021, 1.6107, 1.3460, 1.8991],
+ device='cuda:0'), covar=tensor([0.3090, 0.2931, 0.1726, 0.2439, 0.2594, 0.2660, 0.4287, 0.2053],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0247, 0.0227, 0.0315, 0.0217, 0.0231, 0.0228, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 09:15:04,870 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7111, 1.2037, 1.7503, 2.1964, 1.8224, 1.6873, 1.7351, 1.7075],
+ device='cuda:0'), covar=tensor([0.4494, 0.7124, 0.6591, 0.5624, 0.5946, 0.7795, 0.7796, 0.8750],
+ device='cuda:0'), in_proj_covar=tensor([0.0418, 0.0407, 0.0493, 0.0504, 0.0447, 0.0470, 0.0476, 0.0483],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:15:15,401 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.634e+02 1.984e+02 2.410e+02 6.572e+02, threshold=3.968e+02, percent-clipped=2.0
+2023-04-27 09:15:16,632 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=91553.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:15:28,933 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.21 vs. limit=5.0
+2023-04-27 09:15:29,903 INFO [finetune.py:976] (0/7) Epoch 16, batch 5650, loss[loss=0.2294, simple_loss=0.2944, pruned_loss=0.08223, over 4743.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2534, pruned_loss=0.05631, over 953752.71 frames. ], batch size: 59, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:16:20,698 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91603.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 09:16:34,226 INFO [finetune.py:976] (0/7) Epoch 16, batch 5700, loss[loss=0.1312, simple_loss=0.2037, pruned_loss=0.02938, over 4386.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.249, pruned_loss=0.055, over 938728.68 frames. ], batch size: 19, lr: 3.42e-03, grad_scale: 32.0
+2023-04-27 09:17:06,020 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91641.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:17:07,348 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-16.pt
+2023-04-27 09:17:19,302 INFO [finetune.py:976] (0/7) Epoch 17, batch 0, loss[loss=0.2527, simple_loss=0.3084, pruned_loss=0.0985, over 4802.00 frames. ], tot_loss[loss=0.2527, simple_loss=0.3084, pruned_loss=0.0985, over 4802.00 frames. ], batch size: 39, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:17:19,303 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 09:17:21,356 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7490, 1.0378, 1.6617, 2.2118, 1.8523, 1.6787, 1.6850, 1.6859],
+ device='cuda:0'), covar=tensor([0.4907, 0.7126, 0.6591, 0.6083, 0.6265, 0.8627, 0.8015, 0.8360],
+ device='cuda:0'), in_proj_covar=tensor([0.0420, 0.0408, 0.0496, 0.0506, 0.0449, 0.0471, 0.0479, 0.0485],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:17:21,415 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3869, 1.3619, 3.8163, 3.5080, 3.4145, 3.6122, 3.7381, 3.3925],
+ device='cuda:0'), covar=tensor([0.7121, 0.5234, 0.1274, 0.2135, 0.1209, 0.1543, 0.0757, 0.1533],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0303, 0.0400, 0.0400, 0.0345, 0.0408, 0.0307, 0.0361],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:17:21,934 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5216, 1.3433, 1.7307, 1.6942, 1.3544, 1.2852, 1.3842, 0.9164],
+ device='cuda:0'), covar=tensor([0.0511, 0.0763, 0.0488, 0.0547, 0.0794, 0.1129, 0.0561, 0.0632],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0069, 0.0068, 0.0075, 0.0097, 0.0075, 0.0067],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 09:17:40,815 INFO [finetune.py:1010] (0/7) Epoch 17, validation: loss=0.1535, simple_loss=0.2247, pruned_loss=0.04111, over 2265189.00 frames.
+2023-04-27 09:17:40,816 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 09:17:45,697 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.575e+02 1.838e+02 2.202e+02 3.811e+02, threshold=3.676e+02, percent-clipped=0.0
+2023-04-27 09:18:20,574 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0
+2023-04-27 09:18:24,711 INFO [finetune.py:976] (0/7) Epoch 17, batch 50, loss[loss=0.1296, simple_loss=0.2115, pruned_loss=0.02383, over 4754.00 frames. ], tot_loss[loss=0.1853, simple_loss=0.2561, pruned_loss=0.0573, over 216479.42 frames. ], batch size: 28, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:18:32,666 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1500, 2.2057, 1.9148, 1.8940, 2.3036, 1.7432, 2.8006, 1.6234],
+ device='cuda:0'), covar=tensor([0.4288, 0.2260, 0.5222, 0.3374, 0.1881, 0.3031, 0.1640, 0.5147],
+ device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0348, 0.0432, 0.0359, 0.0385, 0.0383, 0.0374, 0.0424],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:18:39,429 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0
+2023-04-27 09:18:45,329 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2235, 3.3290, 0.8545, 1.5969, 1.7174, 2.2183, 1.9217, 0.9950],
+ device='cuda:0'), covar=tensor([0.2106, 0.1766, 0.2627, 0.2035, 0.1521, 0.1547, 0.1829, 0.2322],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0243, 0.0137, 0.0121, 0.0132, 0.0153, 0.0117, 0.0120],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 09:18:57,080 INFO [finetune.py:976] (0/7) Epoch 17, batch 100, loss[loss=0.1654, simple_loss=0.2216, pruned_loss=0.05462, over 4905.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2498, pruned_loss=0.05641, over 379711.10 frames. ], batch size: 37, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:19:02,435 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.653e+02 1.974e+02 2.303e+02 4.426e+02, threshold=3.948e+02, percent-clipped=2.0
+2023-04-27 09:19:03,964 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-04-27 09:19:30,053 INFO [finetune.py:976] (0/7) Epoch 17, batch 150, loss[loss=0.1579, simple_loss=0.228, pruned_loss=0.04392, over 4862.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2429, pruned_loss=0.05323, over 507948.81 frames. ], batch size: 31, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:20:03,532 INFO [finetune.py:976] (0/7) Epoch 17, batch 200, loss[loss=0.2225, simple_loss=0.287, pruned_loss=0.07901, over 4824.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2442, pruned_loss=0.05383, over 607554.38 frames. ], batch size: 38, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:20:04,221 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1712, 1.4540, 1.3309, 1.7559, 1.5942, 1.7751, 1.3551, 3.0909],
+ device='cuda:0'), covar=tensor([0.0636, 0.0816, 0.0866, 0.1256, 0.0688, 0.0456, 0.0769, 0.0193],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 09:20:08,960 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.596e+01 1.677e+02 1.899e+02 2.270e+02 6.599e+02, threshold=3.797e+02, percent-clipped=2.0
+2023-04-27 09:20:35,885 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91891.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:20:36,989 INFO [finetune.py:976] (0/7) Epoch 17, batch 250, loss[loss=0.1745, simple_loss=0.2481, pruned_loss=0.05048, over 4897.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.249, pruned_loss=0.05542, over 687100.10 frames. ], batch size: 35, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:20:40,142 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=91898.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 09:20:41,404 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0095, 1.9749, 2.4456, 2.6063, 1.8450, 1.8165, 1.9944, 1.0680],
+ device='cuda:0'), covar=tensor([0.0558, 0.0823, 0.0412, 0.0680, 0.0783, 0.1089, 0.0710, 0.0840],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0069, 0.0068, 0.0067, 0.0075, 0.0096, 0.0074, 0.0067],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 09:21:05,108 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6776, 1.4026, 1.6132, 1.9524, 1.9736, 1.6310, 1.3494, 1.7580],
+ device='cuda:0'), covar=tensor([0.0808, 0.1216, 0.0757, 0.0498, 0.0607, 0.0841, 0.0792, 0.0576],
+ device='cuda:0'), in_proj_covar=tensor([0.0189, 0.0200, 0.0181, 0.0172, 0.0176, 0.0181, 0.0152, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:21:08,452 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=91941.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:21:10,055 INFO [finetune.py:976] (0/7) Epoch 17, batch 300, loss[loss=0.1757, simple_loss=0.2436, pruned_loss=0.05393, over 4862.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2517, pruned_loss=0.05551, over 747744.26 frames. ], batch size: 31, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:21:14,177 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-27 09:21:15,845 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.651e+02 1.943e+02 2.275e+02 4.588e+02, threshold=3.886e+02, percent-clipped=2.0
+2023-04-27 09:21:16,616 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=91952.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:21:17,780 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.4752, 3.3938, 2.8624, 4.0260, 3.2788, 3.4407, 1.7697, 3.4542],
+ device='cuda:0'), covar=tensor([0.1771, 0.1286, 0.3640, 0.1413, 0.3151, 0.1823, 0.4855, 0.2441],
+ device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0214, 0.0253, 0.0306, 0.0300, 0.0250, 0.0275, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 09:21:18,451 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91955.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:21:30,540 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7192, 1.9431, 0.9486, 1.4285, 2.0853, 1.5790, 1.4577, 1.5932],
+ device='cuda:0'), covar=tensor([0.0474, 0.0351, 0.0320, 0.0529, 0.0251, 0.0513, 0.0505, 0.0531],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 09:21:41,951 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6827, 1.6494, 2.1310, 2.1380, 1.6173, 1.4034, 1.7468, 0.9645],
+ device='cuda:0'), covar=tensor([0.0699, 0.0776, 0.0485, 0.0772, 0.0689, 0.1243, 0.0784, 0.0837],
+ device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0069, 0.0068, 0.0067, 0.0074, 0.0096, 0.0074, 0.0067],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 09:21:43,852 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.79 vs. limit=5.0
+2023-04-27 09:21:51,285 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=91981.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 09:21:56,127 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=91989.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:22:03,160 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-27 09:22:04,086 INFO [finetune.py:976] (0/7) Epoch 17, batch 350, loss[loss=0.1827, simple_loss=0.2617, pruned_loss=0.05183, over 4821.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2524, pruned_loss=0.05541, over 794826.64 frames. ], batch size: 33, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:22:14,506 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-92000.pt
+2023-04-27 09:22:36,913 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92016.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:22:57,026 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7722, 2.7359, 2.2351, 3.2324, 2.7882, 2.7785, 1.2552, 2.7547],
+ device='cuda:0'), covar=tensor([0.2162, 0.1671, 0.3739, 0.3078, 0.2910, 0.2136, 0.5204, 0.3007],
+ device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0213, 0.0252, 0.0306, 0.0299, 0.0249, 0.0274, 0.0274],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 09:23:09,169 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92042.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 09:23:09,648 INFO [finetune.py:976] (0/7) Epoch 17, batch 400, loss[loss=0.1778, simple_loss=0.2491, pruned_loss=0.05327, over 4789.00 frames. ], tot_loss[loss=0.1814, simple_loss=0.2525, pruned_loss=0.05511, over 830299.96 frames. ], batch size: 51, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:23:21,384 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.614e+02 1.906e+02 2.282e+02 3.471e+02, threshold=3.811e+02, percent-clipped=0.0
+2023-04-27 09:23:31,585 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-27 09:24:03,175 INFO [finetune.py:976] (0/7) Epoch 17, batch 450, loss[loss=0.179, simple_loss=0.2457, pruned_loss=0.05613, over 4875.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2506, pruned_loss=0.05435, over 858483.75 frames. ], batch size: 34, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:24:36,847 INFO [finetune.py:976] (0/7) Epoch 17, batch 500, loss[loss=0.1387, simple_loss=0.2122, pruned_loss=0.03257, over 4768.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2478, pruned_loss=0.0534, over 880301.42 frames. ], batch size: 28, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:24:42,156 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.066e+02 1.524e+02 1.904e+02 2.252e+02 3.809e+02, threshold=3.808e+02, percent-clipped=0.0
+2023-04-27 09:24:52,969 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2594, 1.7818, 2.2680, 2.5587, 2.1891, 1.7215, 1.4171, 2.0509],
+ device='cuda:0'), covar=tensor([0.3470, 0.3273, 0.1604, 0.2714, 0.2766, 0.2795, 0.4083, 0.2071],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0246, 0.0226, 0.0314, 0.0216, 0.0231, 0.0229, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 09:24:56,955 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8149, 4.2872, 0.7031, 2.1032, 2.4177, 2.5579, 2.4281, 0.8866],
+ device='cuda:0'), covar=tensor([0.1504, 0.0882, 0.2456, 0.1386, 0.1116, 0.1240, 0.1538, 0.2291],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0244, 0.0138, 0.0122, 0.0132, 0.0154, 0.0118, 0.0120],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 09:25:10,286 INFO [finetune.py:976] (0/7) Epoch 17, batch 550, loss[loss=0.1674, simple_loss=0.2346, pruned_loss=0.05009, over 4889.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2456, pruned_loss=0.05311, over 897555.28 frames. ], batch size: 32, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:25:13,461 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92198.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 09:25:44,127 INFO [finetune.py:976] (0/7) Epoch 17, batch 600, loss[loss=0.1342, simple_loss=0.2044, pruned_loss=0.03194, over 4723.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2459, pruned_loss=0.0541, over 910732.23 frames. ], batch size: 23, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:25:46,054 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=92246.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 09:25:46,671 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92247.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:25:49,031 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.221e+02 1.690e+02 1.979e+02 2.477e+02 6.011e+02, threshold=3.959e+02, percent-clipped=1.0
+2023-04-27 09:25:59,630 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3469, 1.8208, 2.3180, 2.6836, 2.2317, 1.8301, 1.4713, 1.9526],
+ device='cuda:0'), covar=tensor([0.3685, 0.3553, 0.1593, 0.2554, 0.2817, 0.2716, 0.4160, 0.2273],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0246, 0.0225, 0.0314, 0.0216, 0.0230, 0.0228, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 09:26:10,264 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3080, 1.8246, 2.3073, 2.6575, 2.1862, 1.7848, 1.4453, 1.9530],
+ device='cuda:0'), covar=tensor([0.3515, 0.3281, 0.1666, 0.2297, 0.2748, 0.3004, 0.4239, 0.2176],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0246, 0.0226, 0.0314, 0.0216, 0.0231, 0.0228, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 09:26:17,387 INFO [finetune.py:976] (0/7) Epoch 17, batch 650, loss[loss=0.2085, simple_loss=0.2848, pruned_loss=0.06611, over 4833.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2476, pruned_loss=0.05409, over 921426.51 frames. ], batch size: 40, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:26:23,015 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4303, 3.6048, 0.7059, 1.8282, 1.8463, 2.4269, 1.8890, 0.9500],
+ device='cuda:0'), covar=tensor([0.1513, 0.0763, 0.2287, 0.1299, 0.1174, 0.1052, 0.1530, 0.2160],
+ device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0245, 0.0139, 0.0122, 0.0133, 0.0154, 0.0118, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 09:26:29,958 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92311.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:26:56,488 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92337.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 09:27:00,059 INFO [finetune.py:976] (0/7) Epoch 17, batch 700, loss[loss=0.1848, simple_loss=0.2466, pruned_loss=0.06154, over 4780.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2473, pruned_loss=0.05332, over 929792.46 frames. ], batch size: 26, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:27:10,671 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.019e+02 1.654e+02 1.859e+02 2.105e+02 3.715e+02, threshold=3.718e+02, percent-clipped=1.0
+2023-04-27 09:27:35,727 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7180, 2.0804, 1.6304, 1.3917, 1.2876, 1.2756, 1.6454, 1.2383],
+ device='cuda:0'), covar=tensor([0.1666, 0.1339, 0.1482, 0.1788, 0.2313, 0.2023, 0.0985, 0.2030],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0215, 0.0170, 0.0206, 0.0202, 0.0186, 0.0157, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 09:27:37,693 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0
+2023-04-27 09:27:49,356 INFO [finetune.py:976] (0/7) Epoch 17, batch 750, loss[loss=0.1831, simple_loss=0.2455, pruned_loss=0.06033, over 4700.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2497, pruned_loss=0.05435, over 935013.96 frames. ], batch size: 23, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:28:11,328 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9411, 1.8369, 1.6892, 1.5069, 1.9426, 1.5700, 2.5054, 1.5054],
+ device='cuda:0'), covar=tensor([0.4268, 0.2223, 0.5576, 0.3067, 0.1842, 0.2792, 0.1445, 0.4893],
+ device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0349, 0.0434, 0.0360, 0.0386, 0.0384, 0.0374, 0.0424],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:28:44,566 INFO [finetune.py:976] (0/7) Epoch 17, batch 800, loss[loss=0.1743, simple_loss=0.2484, pruned_loss=0.05008, over 4826.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2492, pruned_loss=0.05359, over 939859.70 frames. ], batch size: 33, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:28:54,298 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6231, 2.0306, 1.6520, 1.8864, 1.4923, 1.6737, 1.5811, 1.3496],
+ device='cuda:0'), covar=tensor([0.1936, 0.1592, 0.0961, 0.1309, 0.3618, 0.1233, 0.1977, 0.2505],
+ device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0307, 0.0220, 0.0279, 0.0314, 0.0261, 0.0251, 0.0267],
+ device='cuda:0'), out_proj_covar=tensor([1.1479e-04, 1.2211e-04, 8.7661e-05, 1.1067e-04, 1.2760e-04, 1.0373e-04,
+ 1.0156e-04, 1.0594e-04], device='cuda:0')
+2023-04-27 09:28:54,787 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.529e+02 1.784e+02 2.077e+02 4.006e+02, threshold=3.568e+02, percent-clipped=1.0
+2023-04-27 09:28:54,991 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-27 09:29:08,912 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2175, 1.7203, 2.1435, 2.4259, 2.0819, 1.7084, 1.3017, 1.8491],
+ device='cuda:0'), covar=tensor([0.3467, 0.3541, 0.1716, 0.2480, 0.2638, 0.2701, 0.4202, 0.2295],
+ device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0248, 0.0227, 0.0317, 0.0218, 0.0232, 0.0230, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 09:29:21,976 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0
+2023-04-27 09:29:31,910 INFO [finetune.py:976] (0/7) Epoch 17, batch 850, loss[loss=0.1806, simple_loss=0.2518, pruned_loss=0.05468, over 4848.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2476, pruned_loss=0.05317, over 942104.96 frames. ], batch size: 44, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:29:33,281 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1173, 2.1396, 1.3636, 1.7382, 2.2630, 1.9387, 1.8281, 1.9180],
+ device='cuda:0'), covar=tensor([0.0455, 0.0325, 0.0319, 0.0519, 0.0247, 0.0469, 0.0488, 0.0519],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 09:29:39,289 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7923, 3.7643, 2.6724, 4.4746, 3.9052, 3.8199, 1.8987, 3.7586],
+ device='cuda:0'), covar=tensor([0.1936, 0.1126, 0.2904, 0.1646, 0.3483, 0.1762, 0.5381, 0.2395],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0213, 0.0251, 0.0304, 0.0297, 0.0249, 0.0273, 0.0271],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 09:29:48,931 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2471, 1.9416, 1.8579, 2.1220, 2.0326, 1.9681, 1.6877, 4.2609],
+ device='cuda:0'), covar=tensor([0.0557, 0.0720, 0.0696, 0.1042, 0.0558, 0.0546, 0.0671, 0.0095],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0039, 0.0037, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 09:30:05,435 INFO [finetune.py:976] (0/7) Epoch 17, batch 900, loss[loss=0.1439, simple_loss=0.2171, pruned_loss=0.03537, over 4867.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.245, pruned_loss=0.05236, over 944472.55 frames. ], batch size: 31, lr: 3.41e-03, grad_scale: 32.0
+2023-04-27 09:30:07,984 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92547.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:30:10,295 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.562e+01 1.556e+02 1.849e+02 2.275e+02 6.056e+02, threshold=3.698e+02, percent-clipped=3.0
+2023-04-27 09:30:15,986 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9198, 1.5263, 2.0749, 2.4062, 2.0233, 1.9597, 2.0146, 1.9864],
+ device='cuda:0'), covar=tensor([0.4968, 0.7170, 0.6801, 0.6497, 0.6677, 0.8858, 0.8565, 0.7966],
+ device='cuda:0'), in_proj_covar=tensor([0.0420, 0.0407, 0.0496, 0.0507, 0.0450, 0.0473, 0.0479, 0.0484],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:30:34,495 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7603, 3.4910, 2.9694, 3.1024, 2.5407, 2.9499, 3.1834, 2.4589],
+ device='cuda:0'), covar=tensor([0.2019, 0.1555, 0.0976, 0.1401, 0.2932, 0.1277, 0.1676, 0.2432],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0307, 0.0220, 0.0279, 0.0313, 0.0260, 0.0252, 0.0266],
+ device='cuda:0'), out_proj_covar=tensor([1.1461e-04, 1.2190e-04, 8.7452e-05, 1.1070e-04, 1.2742e-04, 1.0338e-04,
+ 1.0160e-04, 1.0589e-04], device='cuda:0')
+2023-04-27 09:30:38,494 INFO [finetune.py:976] (0/7) Epoch 17, batch 950, loss[loss=0.1807, simple_loss=0.2438, pruned_loss=0.05886, over 4819.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.244, pruned_loss=0.0526, over 946223.28 frames. ], batch size: 39, lr: 3.40e-03, grad_scale: 32.0
+2023-04-27 09:30:39,816 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=92595.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:30:49,700 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92611.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:30:59,830 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92625.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:31:07,654 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=92637.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 09:31:12,292 INFO [finetune.py:976] (0/7) Epoch 17, batch 1000, loss[loss=0.179, simple_loss=0.2506, pruned_loss=0.05375, over 4848.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2461, pruned_loss=0.05356, over 944977.34 frames. ], batch size: 47, lr: 3.40e-03, grad_scale: 32.0
+2023-04-27 09:31:17,225 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.750e+01 1.761e+02 2.088e+02 2.403e+02 5.880e+02, threshold=4.175e+02, percent-clipped=3.0
+2023-04-27 09:31:22,263 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=92659.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:31:22,274 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.5430, 3.4761, 2.9542, 4.1400, 3.4208, 3.5757, 1.7187, 3.6839],
+ device='cuda:0'), covar=tensor([0.1666, 0.1325, 0.3780, 0.1256, 0.3049, 0.1749, 0.4907, 0.2083],
+ device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0211, 0.0249, 0.0303, 0.0295, 0.0248, 0.0272, 0.0270],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 09:31:40,694 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=92685.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 09:31:40,737 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2503, 1.2243, 1.3465, 1.6086, 1.5980, 1.2799, 0.9440, 1.5044],
+ device='cuda:0'), covar=tensor([0.0904, 0.1340, 0.0857, 0.0584, 0.0645, 0.0810, 0.0936, 0.0605],
+ device='cuda:0'), in_proj_covar=tensor([0.0193, 0.0204, 0.0185, 0.0175, 0.0179, 0.0184, 0.0155, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:31:41,324 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92686.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:31:45,965 INFO [finetune.py:976] (0/7) Epoch 17, batch 1050, loss[loss=0.1652, simple_loss=0.2428, pruned_loss=0.04376, over 4828.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2507, pruned_loss=0.05482, over 946940.11 frames. ], batch size: 39, lr: 3.40e-03, grad_scale: 64.0
+2023-04-27 09:32:03,090 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92720.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:32:21,437 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6692, 2.8252, 2.5057, 2.6221, 2.9456, 2.5250, 3.8912, 2.3249],
+ device='cuda:0'), covar=tensor([0.3854, 0.2252, 0.3917, 0.3335, 0.1884, 0.2639, 0.1245, 0.3946],
+ device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0352, 0.0436, 0.0361, 0.0389, 0.0386, 0.0376, 0.0429],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 09:32:30,324 INFO [finetune.py:976] (0/7) Epoch 17, batch 1100, loss[loss=0.1892, simple_loss=0.2745, pruned_loss=0.05197, over 4919.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2514, pruned_loss=0.05449, over 949071.54 frames. ], batch size: 42, lr: 3.40e-03, grad_scale: 64.0
+2023-04-27 09:32:36,189 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.169e+02 1.706e+02 1.976e+02 2.311e+02 4.775e+02, threshold=3.952e+02, percent-clipped=2.0
+2023-04-27 09:32:50,217 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0
+2023-04-27 09:33:11,522 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92781.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:33:32,087 INFO [finetune.py:976] (0/7) Epoch 17, batch 1150, loss[loss=0.1774, simple_loss=0.261, pruned_loss=0.04686, over 4903.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2514, pruned_loss=0.05385, over 950890.07 frames. ], batch size: 37, lr: 3.40e-03, grad_scale: 32.0
+2023-04-27 09:33:45,155 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6677, 1.8637, 0.9817, 1.3693, 2.2414, 1.5525, 1.4704, 1.5876],
+ device='cuda:0'), covar=tensor([0.0515, 0.0356, 0.0335, 0.0555, 0.0251, 0.0513, 0.0518, 0.0554],
+ device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0023, 0.0029, 0.0020, 0.0028, 0.0028, 0.0029],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0051],
+ device='cuda:0')
+2023-04-27 09:34:18,508 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3043, 1.5586, 1.7182, 1.8402, 1.7407, 1.8651, 1.8027, 1.7752],
+ device='cuda:0'), covar=tensor([0.4201, 0.5668, 0.4905, 0.4642, 0.5669, 0.7610, 0.5732, 0.4999],
+ device='cuda:0'), in_proj_covar=tensor([0.0331, 0.0371, 0.0316, 0.0332, 0.0344, 0.0395, 0.0354, 0.0323],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 09:34:19,553 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92831.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:34:39,529 INFO [finetune.py:976] (0/7) Epoch 17, batch 1200, loss[loss=0.1547, simple_loss=0.2291, pruned_loss=0.0402, over 4743.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2495, pruned_loss=0.05339, over 952806.31 frames. ], batch size: 28, lr: 3.40e-03, grad_scale: 32.0
+2023-04-27 09:34:50,260 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.173e+01 1.694e+02 1.853e+02 2.185e+02 5.052e+02, threshold=3.707e+02, percent-clipped=2.0
+2023-04-27 09:35:25,396 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4930, 3.0131, 2.1822, 2.4644, 1.6482, 1.6695, 2.4367, 1.4969],
+ device='cuda:0'), covar=tensor([0.1578, 0.1313, 0.1505, 0.1596, 0.2429, 0.1929, 0.0981, 0.2141],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0212, 0.0168, 0.0204, 0.0201, 0.0184, 0.0156, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 09:35:44,780 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92892.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:35:45,252 INFO [finetune.py:976] (0/7) Epoch 17, batch 1250, loss[loss=0.1843, simple_loss=0.244, pruned_loss=0.0623, over 4911.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2472, pruned_loss=0.05311, over 951907.14 frames. ], batch size: 37, lr: 3.40e-03, grad_scale: 32.0
+2023-04-27 09:35:48,998 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92899.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:36:25,927 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92931.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 09:36:45,900 INFO [finetune.py:976] (0/7) Epoch 17, batch 1300, loss[loss=0.1508, simple_loss=0.2216, pruned_loss=0.04003, over 4782.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2445, pruned_loss=0.05232, over 950921.85 frames.
], batch size: 26, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:36:57,404 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.209e+02 1.510e+02 1.807e+02 2.085e+02 3.413e+02, threshold=3.613e+02, percent-clipped=0.0 +2023-04-27 09:36:59,300 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3194, 2.9210, 0.9756, 1.8146, 1.8426, 2.1937, 1.8078, 1.0051], + device='cuda:0'), covar=tensor([0.1362, 0.1028, 0.1704, 0.1170, 0.1011, 0.0936, 0.1361, 0.1941], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0243, 0.0137, 0.0121, 0.0132, 0.0153, 0.0118, 0.0120], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 09:37:08,808 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6920, 1.4319, 0.6868, 1.3811, 1.5053, 1.5800, 1.4414, 1.5088], + device='cuda:0'), covar=tensor([0.0458, 0.0331, 0.0377, 0.0499, 0.0280, 0.0447, 0.0462, 0.0502], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0051], + device='cuda:0') +2023-04-27 09:37:08,836 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92960.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:37:23,986 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=92975.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:37:30,531 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2592, 1.6454, 1.4384, 1.7948, 1.6964, 1.9126, 1.4311, 3.6189], + device='cuda:0'), covar=tensor([0.0592, 0.0776, 0.0777, 0.1168, 0.0620, 0.0516, 0.0756, 0.0149], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0039, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 09:37:33,571 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=92981.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:37:40,799 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=92992.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:37:41,318 INFO [finetune.py:976] (0/7) Epoch 17, batch 1350, loss[loss=0.2199, simple_loss=0.2733, pruned_loss=0.08323, over 4942.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.245, pruned_loss=0.05317, over 951657.46 frames. ], batch size: 33, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:37:53,910 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93010.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:38:16,114 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93036.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:38:16,128 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93036.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:38:16,849 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.29 vs. limit=5.0 +2023-04-27 09:38:26,343 INFO [finetune.py:976] (0/7) Epoch 17, batch 1400, loss[loss=0.1755, simple_loss=0.2498, pruned_loss=0.05058, over 4933.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2478, pruned_loss=0.05369, over 950992.89 frames. 
], batch size: 38, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:38:44,540 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.602e+02 1.954e+02 2.219e+02 5.640e+02, threshold=3.909e+02, percent-clipped=3.0 +2023-04-27 09:39:08,803 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93071.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:39:11,826 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93076.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:39:28,194 INFO [finetune.py:976] (0/7) Epoch 17, batch 1450, loss[loss=0.1356, simple_loss=0.2189, pruned_loss=0.02613, over 4751.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2478, pruned_loss=0.05308, over 951561.00 frames. ], batch size: 27, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:39:37,299 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93097.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 09:40:01,735 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93115.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:40:24,592 INFO [finetune.py:976] (0/7) Epoch 17, batch 1500, loss[loss=0.1523, simple_loss=0.2176, pruned_loss=0.04344, over 4856.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2504, pruned_loss=0.05423, over 951586.41 frames. ], batch size: 31, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:40:31,495 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.667e+02 1.959e+02 2.406e+02 7.498e+02, threshold=3.919e+02, percent-clipped=4.0 +2023-04-27 09:40:53,979 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93176.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:41:06,563 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93187.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:41:15,566 INFO [finetune.py:976] (0/7) Epoch 17, batch 1550, loss[loss=0.1779, simple_loss=0.2512, pruned_loss=0.05226, over 4845.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2493, pruned_loss=0.05359, over 952621.83 frames. ], batch size: 44, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:41:49,256 INFO [finetune.py:976] (0/7) Epoch 17, batch 1600, loss[loss=0.1348, simple_loss=0.2087, pruned_loss=0.03048, over 4848.00 frames. ], tot_loss[loss=0.1789, simple_loss=0.249, pruned_loss=0.05441, over 953871.55 frames. ], batch size: 47, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:41:54,718 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.590e+02 1.909e+02 2.207e+02 5.345e+02, threshold=3.818e+02, percent-clipped=1.0 +2023-04-27 09:41:56,543 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7458, 1.6622, 1.0232, 1.4047, 1.9032, 1.6227, 1.5456, 1.5748], + device='cuda:0'), covar=tensor([0.0501, 0.0386, 0.0332, 0.0568, 0.0264, 0.0521, 0.0490, 0.0596], + device='cuda:0'), in_proj_covar=tensor([0.0029, 0.0024, 0.0023, 0.0029, 0.0020, 0.0028, 0.0028, 0.0029], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0051], + device='cuda:0') +2023-04-27 09:41:57,599 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93255.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:42:02,397 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. 
limit=2.0 +2023-04-27 09:42:02,959 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-27 09:42:15,819 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93281.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:42:19,486 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93287.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:42:23,065 INFO [finetune.py:976] (0/7) Epoch 17, batch 1650, loss[loss=0.1586, simple_loss=0.2133, pruned_loss=0.05189, over 4919.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2456, pruned_loss=0.0536, over 954621.96 frames. ], batch size: 37, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:42:25,039 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8811, 1.8770, 1.7680, 1.5236, 2.0403, 1.7080, 2.5335, 1.5575], + device='cuda:0'), covar=tensor([0.3544, 0.2028, 0.5221, 0.3050, 0.1667, 0.2484, 0.1432, 0.4956], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0347, 0.0431, 0.0357, 0.0384, 0.0383, 0.0371, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 09:43:03,609 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=93329.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:43:04,865 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93331.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:43:14,679 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3398, 3.0638, 2.5251, 2.8326, 2.1008, 2.8060, 2.7404, 2.1291], + device='cuda:0'), covar=tensor([0.2117, 0.1222, 0.0824, 0.1157, 0.3342, 0.1138, 0.1931, 0.2867], + device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0310, 0.0222, 0.0281, 0.0317, 0.0262, 0.0253, 0.0269], + device='cuda:0'), out_proj_covar=tensor([1.1584e-04, 1.2329e-04, 8.8251e-05, 1.1157e-04, 1.2874e-04, 1.0418e-04, + 1.0222e-04, 1.0701e-04], device='cuda:0') +2023-04-27 09:43:17,621 INFO [finetune.py:976] (0/7) Epoch 17, batch 1700, loss[loss=0.2009, simple_loss=0.2688, pruned_loss=0.06648, over 4895.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2435, pruned_loss=0.05278, over 955417.86 frames. 
], batch size: 43, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:43:23,104 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.559e+02 1.851e+02 2.220e+02 4.230e+02, threshold=3.703e+02, percent-clipped=1.0 +2023-04-27 09:43:33,685 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93366.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:43:36,049 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4263, 1.8993, 1.6229, 2.2760, 2.4388, 1.9943, 1.8956, 1.7372], + device='cuda:0'), covar=tensor([0.1811, 0.1528, 0.1855, 0.1579, 0.1139, 0.1811, 0.2347, 0.2332], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0312, 0.0352, 0.0290, 0.0329, 0.0311, 0.0301, 0.0367], + device='cuda:0'), out_proj_covar=tensor([6.3435e-05, 6.5084e-05, 7.4992e-05, 5.9074e-05, 6.8378e-05, 6.5659e-05, + 6.3600e-05, 7.8309e-05], device='cuda:0') +2023-04-27 09:43:40,763 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93376.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:43:51,036 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93392.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 09:43:51,575 INFO [finetune.py:976] (0/7) Epoch 17, batch 1750, loss[loss=0.1572, simple_loss=0.2315, pruned_loss=0.04145, over 4867.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.245, pruned_loss=0.05326, over 954020.54 frames. ], batch size: 31, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:44:13,250 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=93424.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:44:36,171 INFO [finetune.py:976] (0/7) Epoch 17, batch 1800, loss[loss=0.198, simple_loss=0.2657, pruned_loss=0.06513, over 4155.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2475, pruned_loss=0.05367, over 954621.73 frames. ], batch size: 65, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:44:47,780 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.710e+02 1.900e+02 2.209e+02 3.616e+02, threshold=3.799e+02, percent-clipped=0.0 +2023-04-27 09:45:05,910 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=93471.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:45:21,533 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7592, 1.3954, 1.3402, 1.5176, 1.9102, 1.4923, 1.2393, 1.2772], + device='cuda:0'), covar=tensor([0.1411, 0.1423, 0.1992, 0.1236, 0.0726, 0.1831, 0.2031, 0.2100], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0314, 0.0354, 0.0292, 0.0331, 0.0313, 0.0303, 0.0369], + device='cuda:0'), out_proj_covar=tensor([6.3826e-05, 6.5503e-05, 7.5519e-05, 5.9375e-05, 6.8808e-05, 6.5961e-05, + 6.3957e-05, 7.8800e-05], device='cuda:0') +2023-04-27 09:45:22,734 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93487.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:45:26,249 INFO [finetune.py:976] (0/7) Epoch 17, batch 1850, loss[loss=0.1576, simple_loss=0.2283, pruned_loss=0.04346, over 4789.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2492, pruned_loss=0.05443, over 955206.60 frames. ], batch size: 25, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:46:27,192 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=93535.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:46:37,237 INFO [finetune.py:976] (0/7) Epoch 17, batch 1900, loss[loss=0.1491, simple_loss=0.2313, pruned_loss=0.03346, over 4814.00 frames. 
], tot_loss[loss=0.1802, simple_loss=0.2507, pruned_loss=0.05482, over 954516.16 frames. ], batch size: 38, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:46:42,804 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.573e+02 1.862e+02 2.216e+02 4.322e+02, threshold=3.725e+02, percent-clipped=2.0 +2023-04-27 09:46:44,734 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93555.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:46:57,204 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8651, 1.3415, 1.5351, 1.4058, 1.9490, 1.5793, 1.3176, 1.4665], + device='cuda:0'), covar=tensor([0.1548, 0.1420, 0.2101, 0.1493, 0.0828, 0.1615, 0.1779, 0.2122], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0314, 0.0353, 0.0291, 0.0330, 0.0312, 0.0302, 0.0368], + device='cuda:0'), out_proj_covar=tensor([6.3670e-05, 6.5396e-05, 7.5217e-05, 5.9228e-05, 6.8694e-05, 6.5724e-05, + 6.3788e-05, 7.8552e-05], device='cuda:0') +2023-04-27 09:46:57,823 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1577, 2.7675, 2.2148, 2.1425, 1.5425, 1.5654, 2.3923, 1.4802], + device='cuda:0'), covar=tensor([0.1804, 0.1662, 0.1626, 0.1888, 0.2625, 0.2164, 0.1060, 0.2281], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0214, 0.0169, 0.0206, 0.0201, 0.0186, 0.0157, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 09:47:06,586 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93587.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:47:10,125 INFO [finetune.py:976] (0/7) Epoch 17, batch 1950, loss[loss=0.1403, simple_loss=0.2276, pruned_loss=0.0265, over 4762.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2493, pruned_loss=0.05407, over 955978.42 frames. ], batch size: 28, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:47:16,345 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=93603.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:47:35,234 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93631.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:47:37,605 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=93635.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:47:48,923 INFO [finetune.py:976] (0/7) Epoch 17, batch 2000, loss[loss=0.1342, simple_loss=0.2037, pruned_loss=0.03233, over 4817.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2483, pruned_loss=0.05429, over 955949.93 frames. 
], batch size: 25, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:47:54,465 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.579e+02 1.885e+02 2.263e+02 4.038e+02, threshold=3.769e+02, percent-clipped=1.0 +2023-04-27 09:48:03,162 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93666.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:48:11,986 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=93679.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:48:17,873 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3707, 1.3065, 1.6623, 1.5585, 1.2996, 1.2284, 1.3156, 0.8106], + device='cuda:0'), covar=tensor([0.0523, 0.0685, 0.0435, 0.0648, 0.0829, 0.1230, 0.0546, 0.0673], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0070, 0.0069, 0.0068, 0.0075, 0.0097, 0.0075, 0.0068], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 09:48:21,386 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93692.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:48:21,911 INFO [finetune.py:976] (0/7) Epoch 17, batch 2050, loss[loss=0.1451, simple_loss=0.2175, pruned_loss=0.03636, over 4915.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2452, pruned_loss=0.05331, over 955652.18 frames. ], batch size: 35, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:48:35,562 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=93714.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:48:53,237 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=93740.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:48:55,037 INFO [finetune.py:976] (0/7) Epoch 17, batch 2100, loss[loss=0.1849, simple_loss=0.2472, pruned_loss=0.06135, over 4822.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2457, pruned_loss=0.05371, over 956012.47 frames. ], batch size: 30, lr: 3.40e-03, grad_scale: 32.0 +2023-04-27 09:49:01,811 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.279e+02 1.585e+02 1.847e+02 2.242e+02 6.268e+02, threshold=3.694e+02, percent-clipped=2.0 +2023-04-27 09:49:11,172 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5651, 1.1286, 1.3615, 1.1532, 1.6672, 1.3605, 1.1277, 1.2861], + device='cuda:0'), covar=tensor([0.1745, 0.1425, 0.2035, 0.1609, 0.0963, 0.1602, 0.2049, 0.2536], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0315, 0.0352, 0.0292, 0.0330, 0.0312, 0.0303, 0.0368], + device='cuda:0'), out_proj_covar=tensor([6.3856e-05, 6.5575e-05, 7.5120e-05, 5.9408e-05, 6.8680e-05, 6.5730e-05, + 6.3855e-05, 7.8554e-05], device='cuda:0') +2023-04-27 09:49:12,991 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93770.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:49:13,585 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=93771.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:49:33,701 INFO [finetune.py:976] (0/7) Epoch 17, batch 2150, loss[loss=0.2949, simple_loss=0.3469, pruned_loss=0.1214, over 4925.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2495, pruned_loss=0.05487, over 953376.77 frames. 
], batch size: 38, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:49:40,715 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2675, 1.6742, 2.1237, 2.7908, 2.2600, 1.7260, 1.7113, 2.0419], + device='cuda:0'), covar=tensor([0.3554, 0.3473, 0.1652, 0.2542, 0.2755, 0.2621, 0.3972, 0.2343], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0247, 0.0227, 0.0317, 0.0218, 0.0231, 0.0230, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 09:50:13,741 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=93819.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:50:26,735 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=93831.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:50:47,865 INFO [finetune.py:976] (0/7) Epoch 17, batch 2200, loss[loss=0.2111, simple_loss=0.2814, pruned_loss=0.07037, over 4912.00 frames. ], tot_loss[loss=0.1793, simple_loss=0.2498, pruned_loss=0.05434, over 954018.71 frames. ], batch size: 36, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:50:59,358 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.629e+02 1.916e+02 2.338e+02 4.475e+02, threshold=3.833e+02, percent-clipped=3.0 +2023-04-27 09:51:20,316 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3995, 1.2776, 1.6954, 1.6076, 1.2871, 1.1462, 1.3702, 0.8863], + device='cuda:0'), covar=tensor([0.0553, 0.0643, 0.0424, 0.0658, 0.0885, 0.1136, 0.0630, 0.0649], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0070, 0.0068, 0.0068, 0.0075, 0.0096, 0.0075, 0.0067], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 09:51:21,538 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6766, 1.4824, 1.7154, 1.9762, 1.9877, 1.5926, 1.3985, 1.8750], + device='cuda:0'), covar=tensor([0.0780, 0.1147, 0.0739, 0.0511, 0.0580, 0.0808, 0.0758, 0.0538], + device='cuda:0'), in_proj_covar=tensor([0.0189, 0.0201, 0.0182, 0.0172, 0.0176, 0.0180, 0.0152, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 09:51:23,389 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6401, 2.0336, 1.5712, 1.4030, 1.1977, 1.2228, 1.5422, 1.1626], + device='cuda:0'), covar=tensor([0.1757, 0.1380, 0.1552, 0.1872, 0.2468, 0.1986, 0.1175, 0.2175], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0213, 0.0169, 0.0206, 0.0201, 0.0185, 0.0157, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 09:51:41,201 INFO [finetune.py:976] (0/7) Epoch 17, batch 2250, loss[loss=0.165, simple_loss=0.2395, pruned_loss=0.0453, over 4816.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.251, pruned_loss=0.05477, over 954904.01 frames. ], batch size: 45, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:52:14,471 INFO [finetune.py:976] (0/7) Epoch 17, batch 2300, loss[loss=0.1479, simple_loss=0.2212, pruned_loss=0.03729, over 4765.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2515, pruned_loss=0.05463, over 955757.77 frames. 
], batch size: 26, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:52:20,959 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.592e+02 1.958e+02 2.327e+02 4.753e+02, threshold=3.916e+02, percent-clipped=2.0 +2023-04-27 09:52:40,177 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=93981.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:52:47,881 INFO [finetune.py:976] (0/7) Epoch 17, batch 2350, loss[loss=0.1801, simple_loss=0.2412, pruned_loss=0.05955, over 4729.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2494, pruned_loss=0.0538, over 954028.65 frames. ], batch size: 54, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:52:59,504 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-94000.pt +2023-04-27 09:53:00,825 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1921, 2.5665, 0.9357, 1.3741, 1.9148, 1.2307, 3.4257, 1.6439], + device='cuda:0'), covar=tensor([0.0608, 0.0683, 0.0864, 0.1203, 0.0532, 0.0991, 0.0218, 0.0624], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0046, 0.0050, 0.0052, 0.0075, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 09:53:46,145 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9406, 2.3704, 1.3008, 1.6911, 2.3597, 1.9186, 1.8103, 1.8334], + device='cuda:0'), covar=tensor([0.0479, 0.0337, 0.0302, 0.0551, 0.0241, 0.0526, 0.0543, 0.0560], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0044, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 09:53:48,618 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94042.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:53:49,094 INFO [finetune.py:976] (0/7) Epoch 17, batch 2400, loss[loss=0.1606, simple_loss=0.2244, pruned_loss=0.04835, over 4914.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2468, pruned_loss=0.05325, over 952141.72 frames. 
], batch size: 43, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:53:56,111 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.042e+02 1.528e+02 1.774e+02 2.113e+02 4.800e+02, threshold=3.549e+02, percent-clipped=1.0 +2023-04-27 09:53:58,003 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5744, 1.7384, 1.8453, 1.9620, 1.7873, 1.9639, 2.0148, 1.8875], + device='cuda:0'), covar=tensor([0.4264, 0.6354, 0.5469, 0.4874, 0.6246, 0.7820, 0.6001, 0.5501], + device='cuda:0'), in_proj_covar=tensor([0.0329, 0.0370, 0.0316, 0.0331, 0.0342, 0.0393, 0.0353, 0.0323], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 09:54:09,227 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3464, 3.1160, 2.5372, 2.8982, 2.2047, 2.8180, 2.7678, 2.0958], + device='cuda:0'), covar=tensor([0.2173, 0.1005, 0.0770, 0.1095, 0.2968, 0.1146, 0.1791, 0.2863], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0308, 0.0219, 0.0278, 0.0313, 0.0258, 0.0250, 0.0266], + device='cuda:0'), out_proj_covar=tensor([1.1455e-04, 1.2247e-04, 8.7285e-05, 1.1049e-04, 1.2731e-04, 1.0277e-04, + 1.0102e-04, 1.0577e-04], device='cuda:0') +2023-04-27 09:54:23,068 INFO [finetune.py:976] (0/7) Epoch 17, batch 2450, loss[loss=0.199, simple_loss=0.2801, pruned_loss=0.05897, over 4818.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.244, pruned_loss=0.0525, over 954697.01 frames. ], batch size: 45, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:54:46,624 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94126.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:54:57,072 INFO [finetune.py:976] (0/7) Epoch 17, batch 2500, loss[loss=0.2291, simple_loss=0.2966, pruned_loss=0.08084, over 4903.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2463, pruned_loss=0.05354, over 954049.12 frames. ], batch size: 35, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:55:03,652 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.665e+02 1.997e+02 2.427e+02 4.291e+02, threshold=3.995e+02, percent-clipped=3.0 +2023-04-27 09:55:10,326 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-27 09:55:31,031 INFO [finetune.py:976] (0/7) Epoch 17, batch 2550, loss[loss=0.235, simple_loss=0.3086, pruned_loss=0.08071, over 4823.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2498, pruned_loss=0.05388, over 953856.32 frames. ], batch size: 39, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:55:52,326 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94223.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 09:56:09,852 INFO [finetune.py:976] (0/7) Epoch 17, batch 2600, loss[loss=0.1881, simple_loss=0.2562, pruned_loss=0.06002, over 4719.00 frames. ], tot_loss[loss=0.1799, simple_loss=0.2511, pruned_loss=0.05438, over 953016.99 frames. 
], batch size: 59, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:56:21,229 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.596e+02 1.895e+02 2.398e+02 4.293e+02, threshold=3.790e+02, percent-clipped=2.0 +2023-04-27 09:57:00,082 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94284.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 09:57:04,366 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5818, 1.1239, 1.7427, 2.1042, 1.6825, 1.6241, 1.6708, 1.6874], + device='cuda:0'), covar=tensor([0.4593, 0.6801, 0.6192, 0.5874, 0.5782, 0.7294, 0.7779, 0.8149], + device='cuda:0'), in_proj_covar=tensor([0.0422, 0.0407, 0.0496, 0.0505, 0.0450, 0.0474, 0.0480, 0.0484], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 09:57:05,478 INFO [finetune.py:976] (0/7) Epoch 17, batch 2650, loss[loss=0.2039, simple_loss=0.2757, pruned_loss=0.06609, over 4919.00 frames. ], tot_loss[loss=0.1812, simple_loss=0.2525, pruned_loss=0.05499, over 955263.45 frames. ], batch size: 38, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:57:34,894 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94337.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:57:38,464 INFO [finetune.py:976] (0/7) Epoch 17, batch 2700, loss[loss=0.1761, simple_loss=0.2568, pruned_loss=0.0477, over 4885.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2503, pruned_loss=0.05354, over 954868.06 frames. ], batch size: 43, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:57:41,616 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=94348.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:57:43,964 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.118e+02 1.591e+02 1.889e+02 2.175e+02 3.860e+02, threshold=3.779e+02, percent-clipped=1.0 +2023-04-27 09:58:17,886 INFO [finetune.py:976] (0/7) Epoch 17, batch 2750, loss[loss=0.1928, simple_loss=0.2432, pruned_loss=0.07118, over 4910.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2479, pruned_loss=0.05337, over 952865.08 frames. ], batch size: 32, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:58:38,906 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=94409.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 09:58:38,994 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.92 vs. limit=5.0 +2023-04-27 09:59:01,608 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94426.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:59:24,239 INFO [finetune.py:976] (0/7) Epoch 17, batch 2800, loss[loss=0.1842, simple_loss=0.2524, pruned_loss=0.05797, over 4907.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2449, pruned_loss=0.0526, over 954422.64 frames. 
], batch size: 37, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 09:59:35,021 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.496e+02 1.786e+02 2.106e+02 4.249e+02, threshold=3.571e+02, percent-clipped=1.0 +2023-04-27 09:59:54,410 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=94474.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 09:59:54,441 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3432, 3.1678, 0.8811, 1.8509, 1.7755, 2.3020, 1.8985, 0.9709], + device='cuda:0'), covar=tensor([0.1378, 0.0959, 0.2000, 0.1141, 0.1053, 0.0975, 0.1468, 0.2086], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0244, 0.0138, 0.0121, 0.0132, 0.0153, 0.0117, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 09:59:56,904 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9934, 1.8369, 1.6260, 1.4827, 1.8443, 1.5687, 2.1213, 1.4772], + device='cuda:0'), covar=tensor([0.2937, 0.1353, 0.3635, 0.2228, 0.1331, 0.1883, 0.1472, 0.3872], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0346, 0.0429, 0.0355, 0.0384, 0.0381, 0.0371, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 10:00:06,867 INFO [finetune.py:976] (0/7) Epoch 17, batch 2850, loss[loss=0.2078, simple_loss=0.2648, pruned_loss=0.07542, over 4759.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2448, pruned_loss=0.05289, over 955291.77 frames. ], batch size: 28, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 10:00:39,852 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.2770, 3.2829, 2.4506, 3.7865, 3.3462, 3.1732, 1.5639, 3.2272], + device='cuda:0'), covar=tensor([0.2239, 0.1322, 0.3562, 0.2530, 0.3215, 0.2345, 0.5661, 0.2835], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0212, 0.0247, 0.0303, 0.0296, 0.0247, 0.0270, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 10:00:40,995 INFO [finetune.py:976] (0/7) Epoch 17, batch 2900, loss[loss=0.1863, simple_loss=0.2777, pruned_loss=0.04747, over 4801.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2462, pruned_loss=0.05323, over 952174.73 frames. ], batch size: 41, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 10:00:46,389 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.024e+02 1.635e+02 2.008e+02 2.460e+02 5.439e+02, threshold=4.016e+02, percent-clipped=3.0 +2023-04-27 10:01:03,845 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94579.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 10:01:11,392 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5440, 1.5915, 4.4986, 4.2216, 3.9665, 4.3055, 4.1617, 3.9436], + device='cuda:0'), covar=tensor([0.6994, 0.5430, 0.0963, 0.1701, 0.1022, 0.1578, 0.1346, 0.1431], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0303, 0.0400, 0.0401, 0.0345, 0.0404, 0.0308, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 10:01:14,241 INFO [finetune.py:976] (0/7) Epoch 17, batch 2950, loss[loss=0.2119, simple_loss=0.275, pruned_loss=0.0744, over 4896.00 frames. ], tot_loss[loss=0.18, simple_loss=0.2503, pruned_loss=0.05485, over 950869.43 frames. 
], batch size: 43, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 10:01:16,204 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7407, 3.7275, 1.0772, 2.0484, 2.2722, 2.6222, 2.1443, 1.0206], + device='cuda:0'), covar=tensor([0.1280, 0.0971, 0.1957, 0.1217, 0.0885, 0.0983, 0.1521, 0.1842], + device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0246, 0.0139, 0.0122, 0.0133, 0.0155, 0.0119, 0.0122], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 10:01:35,049 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-27 10:01:48,941 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1353, 1.4462, 5.3595, 4.9857, 4.7618, 5.1963, 4.7598, 4.7800], + device='cuda:0'), covar=tensor([0.6304, 0.6173, 0.0947, 0.1833, 0.1000, 0.1302, 0.1028, 0.1467], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0303, 0.0400, 0.0400, 0.0345, 0.0403, 0.0308, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 10:01:54,135 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94637.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:02:04,040 INFO [finetune.py:976] (0/7) Epoch 17, batch 3000, loss[loss=0.2117, simple_loss=0.2807, pruned_loss=0.07133, over 4889.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2496, pruned_loss=0.05402, over 950066.29 frames. ], batch size: 35, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 10:02:04,042 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 10:02:27,440 INFO [finetune.py:1010] (0/7) Epoch 17, validation: loss=0.1526, simple_loss=0.2233, pruned_loss=0.04089, over 2265189.00 frames. +2023-04-27 10:02:27,441 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 10:02:39,400 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.688e+02 2.099e+02 2.458e+02 3.567e+02, threshold=4.198e+02, percent-clipped=0.0 +2023-04-27 10:02:47,151 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2022, 2.8532, 2.3522, 2.7002, 2.0867, 2.4977, 2.6039, 1.8062], + device='cuda:0'), covar=tensor([0.2307, 0.1409, 0.0916, 0.1304, 0.3305, 0.1247, 0.1970, 0.3256], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0310, 0.0221, 0.0282, 0.0315, 0.0261, 0.0251, 0.0268], + device='cuda:0'), out_proj_covar=tensor([1.1521e-04, 1.2344e-04, 8.8021e-05, 1.1189e-04, 1.2819e-04, 1.0382e-04, + 1.0164e-04, 1.0645e-04], device='cuda:0') +2023-04-27 10:03:10,423 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=94685.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:03:15,761 INFO [finetune.py:976] (0/7) Epoch 17, batch 3050, loss[loss=0.1757, simple_loss=0.2547, pruned_loss=0.04838, over 4801.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2496, pruned_loss=0.05336, over 951699.06 frames. ], batch size: 51, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 10:03:24,040 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=94704.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 10:03:47,903 INFO [finetune.py:976] (0/7) Epoch 17, batch 3100, loss[loss=0.1932, simple_loss=0.2547, pruned_loss=0.06582, over 4727.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2481, pruned_loss=0.0532, over 950875.32 frames. 
], batch size: 54, lr: 3.39e-03, grad_scale: 32.0 +2023-04-27 10:03:55,871 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.556e+02 1.873e+02 2.227e+02 4.960e+02, threshold=3.745e+02, percent-clipped=1.0 +2023-04-27 10:04:12,122 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5012, 1.3350, 4.4483, 4.1472, 3.8894, 4.2175, 4.1410, 3.8907], + device='cuda:0'), covar=tensor([0.6897, 0.5725, 0.0866, 0.1600, 0.1052, 0.1613, 0.1339, 0.1346], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0304, 0.0400, 0.0401, 0.0345, 0.0403, 0.0308, 0.0362], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 10:04:21,246 INFO [finetune.py:976] (0/7) Epoch 17, batch 3150, loss[loss=0.1714, simple_loss=0.2392, pruned_loss=0.0518, over 4904.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2459, pruned_loss=0.05296, over 952092.38 frames. ], batch size: 32, lr: 3.39e-03, grad_scale: 64.0 +2023-04-27 10:04:29,636 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3736, 1.6682, 1.7630, 1.8756, 1.7627, 1.8625, 1.8481, 1.8208], + device='cuda:0'), covar=tensor([0.4010, 0.4792, 0.4113, 0.4003, 0.5099, 0.6707, 0.4897, 0.4351], + device='cuda:0'), in_proj_covar=tensor([0.0330, 0.0370, 0.0316, 0.0332, 0.0341, 0.0393, 0.0353, 0.0322], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 10:05:23,151 INFO [finetune.py:976] (0/7) Epoch 17, batch 3200, loss[loss=0.1847, simple_loss=0.2632, pruned_loss=0.05311, over 4826.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2438, pruned_loss=0.05233, over 953323.13 frames. ], batch size: 39, lr: 3.39e-03, grad_scale: 64.0 +2023-04-27 10:05:34,598 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.148e+01 1.533e+02 1.770e+02 2.163e+02 3.313e+02, threshold=3.540e+02, percent-clipped=0.0 +2023-04-27 10:05:53,664 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=94879.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 10:06:02,203 INFO [finetune.py:976] (0/7) Epoch 17, batch 3250, loss[loss=0.1843, simple_loss=0.2506, pruned_loss=0.05904, over 4148.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2437, pruned_loss=0.05238, over 952450.92 frames. ], batch size: 65, lr: 3.39e-03, grad_scale: 64.0 +2023-04-27 10:06:26,490 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=94927.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 10:06:26,516 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0611, 1.2694, 1.1433, 1.7009, 1.3933, 1.7178, 1.2357, 3.3619], + device='cuda:0'), covar=tensor([0.0716, 0.0979, 0.1047, 0.1261, 0.0798, 0.0607, 0.0970, 0.0197], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 10:06:36,229 INFO [finetune.py:976] (0/7) Epoch 17, batch 3300, loss[loss=0.1594, simple_loss=0.2229, pruned_loss=0.04796, over 3919.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2477, pruned_loss=0.05368, over 952598.09 frames. 
], batch size: 17, lr: 3.38e-03, grad_scale: 64.0 +2023-04-27 10:06:47,632 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.257e+02 1.598e+02 1.896e+02 2.226e+02 3.987e+02, threshold=3.793e+02, percent-clipped=1.0 +2023-04-27 10:07:30,061 INFO [finetune.py:976] (0/7) Epoch 17, batch 3350, loss[loss=0.2013, simple_loss=0.2529, pruned_loss=0.07482, over 4795.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2501, pruned_loss=0.05446, over 953750.97 frames. ], batch size: 25, lr: 3.38e-03, grad_scale: 64.0 +2023-04-27 10:07:33,228 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7059, 2.2160, 1.7499, 1.6235, 1.2546, 1.2815, 1.7964, 1.2046], + device='cuda:0'), covar=tensor([0.1900, 0.1406, 0.1564, 0.1922, 0.2669, 0.2127, 0.1096, 0.2267], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0213, 0.0168, 0.0206, 0.0201, 0.0185, 0.0156, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 10:07:36,964 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95004.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:07:55,416 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9895, 1.4712, 1.3388, 1.7391, 1.5249, 1.8873, 1.3338, 3.4864], + device='cuda:0'), covar=tensor([0.0674, 0.0771, 0.0824, 0.1135, 0.0618, 0.0530, 0.0747, 0.0171], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0044, 0.0040, 0.0038, 0.0039, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 10:08:02,694 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8277, 2.3678, 1.9553, 2.2306, 1.6524, 1.9496, 1.8591, 1.4996], + device='cuda:0'), covar=tensor([0.1841, 0.1035, 0.0792, 0.0953, 0.2964, 0.1044, 0.1819, 0.2427], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0306, 0.0219, 0.0279, 0.0312, 0.0259, 0.0249, 0.0266], + device='cuda:0'), out_proj_covar=tensor([1.1432e-04, 1.2162e-04, 8.7230e-05, 1.1083e-04, 1.2685e-04, 1.0316e-04, + 1.0089e-04, 1.0555e-04], device='cuda:0') +2023-04-27 10:08:03,816 INFO [finetune.py:976] (0/7) Epoch 17, batch 3400, loss[loss=0.1734, simple_loss=0.2553, pruned_loss=0.0457, over 4822.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2505, pruned_loss=0.05439, over 954260.83 frames. ], batch size: 25, lr: 3.38e-03, grad_scale: 64.0 +2023-04-27 10:08:09,293 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.633e+02 1.895e+02 2.474e+02 5.536e+02, threshold=3.790e+02, percent-clipped=1.0 +2023-04-27 10:08:09,357 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=95052.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:08:14,387 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-27 10:08:37,251 INFO [finetune.py:976] (0/7) Epoch 17, batch 3450, loss[loss=0.2156, simple_loss=0.2748, pruned_loss=0.07819, over 4878.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2503, pruned_loss=0.05425, over 954171.66 frames. ], batch size: 35, lr: 3.38e-03, grad_scale: 64.0 +2023-04-27 10:08:44,629 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95105.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 10:09:07,639 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.10 vs. 
limit=5.0 +2023-04-27 10:09:11,014 INFO [finetune.py:976] (0/7) Epoch 17, batch 3500, loss[loss=0.2001, simple_loss=0.2558, pruned_loss=0.07215, over 4185.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.248, pruned_loss=0.05381, over 950981.52 frames. ], batch size: 65, lr: 3.38e-03, grad_scale: 64.0 +2023-04-27 10:09:16,409 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.623e+02 1.952e+02 2.265e+02 3.860e+02, threshold=3.904e+02, percent-clipped=1.0 +2023-04-27 10:09:25,598 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95166.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 10:09:44,909 INFO [finetune.py:976] (0/7) Epoch 17, batch 3550, loss[loss=0.159, simple_loss=0.2267, pruned_loss=0.04562, over 4739.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.245, pruned_loss=0.05306, over 950393.95 frames. ], batch size: 54, lr: 3.38e-03, grad_scale: 64.0 +2023-04-27 10:09:48,120 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8570, 2.2833, 1.9450, 1.5601, 1.3751, 1.3946, 2.0087, 1.3148], + device='cuda:0'), covar=tensor([0.1724, 0.1384, 0.1390, 0.1794, 0.2230, 0.1965, 0.0936, 0.2016], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0212, 0.0168, 0.0205, 0.0200, 0.0184, 0.0155, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 10:10:38,692 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-27 10:10:42,813 INFO [finetune.py:976] (0/7) Epoch 17, batch 3600, loss[loss=0.1605, simple_loss=0.222, pruned_loss=0.0495, over 4740.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2443, pruned_loss=0.0534, over 950239.11 frames. ], batch size: 23, lr: 3.38e-03, grad_scale: 64.0 +2023-04-27 10:10:54,072 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.027e+02 1.577e+02 1.958e+02 2.437e+02 7.407e+02, threshold=3.916e+02, percent-clipped=3.0 +2023-04-27 10:10:59,921 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95260.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:11:10,483 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95268.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:11:34,140 INFO [finetune.py:976] (0/7) Epoch 17, batch 3650, loss[loss=0.1236, simple_loss=0.1932, pruned_loss=0.02699, over 4723.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2463, pruned_loss=0.05399, over 952670.18 frames. ], batch size: 23, lr: 3.38e-03, grad_scale: 64.0 +2023-04-27 10:11:51,442 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95321.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:11:57,807 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95329.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:12:05,339 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95339.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:12:07,681 INFO [finetune.py:976] (0/7) Epoch 17, batch 3700, loss[loss=0.2294, simple_loss=0.2949, pruned_loss=0.08198, over 4751.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2492, pruned_loss=0.05421, over 951697.98 frames. 
], batch size: 54, lr: 3.38e-03, grad_scale: 64.0 +2023-04-27 10:12:13,171 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.977e+01 1.742e+02 1.979e+02 2.292e+02 4.077e+02, threshold=3.957e+02, percent-clipped=1.0 +2023-04-27 10:12:40,402 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5845, 0.6719, 1.4889, 1.9561, 1.6353, 1.4871, 1.5061, 1.5351], + device='cuda:0'), covar=tensor([0.4659, 0.6594, 0.6387, 0.6124, 0.6284, 0.7314, 0.7339, 0.7399], + device='cuda:0'), in_proj_covar=tensor([0.0420, 0.0406, 0.0496, 0.0503, 0.0448, 0.0473, 0.0479, 0.0484], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 10:12:41,445 INFO [finetune.py:976] (0/7) Epoch 17, batch 3750, loss[loss=0.1806, simple_loss=0.2574, pruned_loss=0.0519, over 4841.00 frames. ], tot_loss[loss=0.1801, simple_loss=0.2507, pruned_loss=0.05477, over 951643.71 frames. ], batch size: 49, lr: 3.38e-03, grad_scale: 32.0 +2023-04-27 10:12:46,002 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95400.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:13:14,397 INFO [finetune.py:976] (0/7) Epoch 17, batch 3800, loss[loss=0.1806, simple_loss=0.2456, pruned_loss=0.0578, over 4880.00 frames. ], tot_loss[loss=0.1808, simple_loss=0.2514, pruned_loss=0.05506, over 951394.34 frames. ], batch size: 35, lr: 3.38e-03, grad_scale: 32.0 +2023-04-27 10:13:20,913 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.628e+02 1.968e+02 2.270e+02 5.105e+02, threshold=3.935e+02, percent-clipped=1.0 +2023-04-27 10:13:25,882 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95461.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 10:13:48,140 INFO [finetune.py:976] (0/7) Epoch 17, batch 3850, loss[loss=0.1613, simple_loss=0.2391, pruned_loss=0.04176, over 4834.00 frames. ], tot_loss[loss=0.1804, simple_loss=0.2508, pruned_loss=0.05498, over 951768.18 frames. ], batch size: 30, lr: 3.38e-03, grad_scale: 32.0 +2023-04-27 10:14:20,427 INFO [finetune.py:976] (0/7) Epoch 17, batch 3900, loss[loss=0.1537, simple_loss=0.2307, pruned_loss=0.03839, over 4862.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2499, pruned_loss=0.05573, over 952451.30 frames. 
], batch size: 31, lr: 3.38e-03, grad_scale: 32.0 +2023-04-27 10:14:27,544 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.967e+01 1.562e+02 1.786e+02 2.211e+02 6.075e+02, threshold=3.573e+02, percent-clipped=1.0 +2023-04-27 10:14:33,821 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3072, 1.6406, 1.5558, 2.1700, 2.3345, 1.9044, 1.8886, 1.6523], + device='cuda:0'), covar=tensor([0.1681, 0.1743, 0.1816, 0.1906, 0.1097, 0.2032, 0.1958, 0.2135], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0314, 0.0351, 0.0288, 0.0327, 0.0310, 0.0300, 0.0365], + device='cuda:0'), out_proj_covar=tensor([6.3729e-05, 6.5411e-05, 7.4778e-05, 5.8553e-05, 6.7971e-05, 6.5306e-05, + 6.3246e-05, 7.7871e-05], device='cuda:0') +2023-04-27 10:14:41,017 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6892, 1.7505, 0.8115, 1.3527, 1.7410, 1.5794, 1.4277, 1.5103], + device='cuda:0'), covar=tensor([0.0503, 0.0368, 0.0355, 0.0571, 0.0257, 0.0527, 0.0519, 0.0585], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 10:14:52,354 INFO [finetune.py:976] (0/7) Epoch 17, batch 3950, loss[loss=0.1612, simple_loss=0.2301, pruned_loss=0.04619, over 4911.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2461, pruned_loss=0.05398, over 952140.37 frames. ], batch size: 37, lr: 3.38e-03, grad_scale: 32.0 +2023-04-27 10:14:58,316 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8501, 1.9592, 1.1463, 1.4984, 2.0579, 1.7371, 1.5792, 1.6868], + device='cuda:0'), covar=tensor([0.0492, 0.0347, 0.0298, 0.0522, 0.0243, 0.0488, 0.0476, 0.0552], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 10:15:08,536 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95616.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:15:09,180 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95617.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:15:13,451 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95624.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:15:31,266 INFO [finetune.py:976] (0/7) Epoch 17, batch 4000, loss[loss=0.2123, simple_loss=0.27, pruned_loss=0.07724, over 4711.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2455, pruned_loss=0.05415, over 952909.99 frames. ], batch size: 59, lr: 3.38e-03, grad_scale: 32.0 +2023-04-27 10:15:43,874 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.207e+01 1.538e+02 1.849e+02 2.258e+02 4.125e+02, threshold=3.697e+02, percent-clipped=1.0 +2023-04-27 10:16:16,171 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95678.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:16:36,027 INFO [finetune.py:976] (0/7) Epoch 17, batch 4050, loss[loss=0.1898, simple_loss=0.2769, pruned_loss=0.05135, over 4921.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2484, pruned_loss=0.05499, over 951985.44 frames. 
+2023-04-27 10:16:36,027 INFO [finetune.py:976] (0/7) Epoch 17, batch 4050, loss[loss=0.1898, simple_loss=0.2769, pruned_loss=0.05135, over 4921.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2484, pruned_loss=0.05499, over 951985.44 frames. ], batch size: 36, lr: 3.38e-03, grad_scale: 32.0
+2023-04-27 10:16:37,790 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95695.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:17:00,892 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0453, 2.1204, 1.7662, 1.8658, 2.0819, 1.5996, 2.8071, 1.3983],
+ device='cuda:0'), covar=tensor([0.4141, 0.1972, 0.4801, 0.3057, 0.1812, 0.3092, 0.1135, 0.5246],
+ device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0346, 0.0427, 0.0355, 0.0381, 0.0381, 0.0370, 0.0421],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:17:36,636 INFO [finetune.py:976] (0/7) Epoch 17, batch 4100, loss[loss=0.1589, simple_loss=0.218, pruned_loss=0.04993, over 4740.00 frames. ], tot_loss[loss=0.1816, simple_loss=0.2515, pruned_loss=0.05582, over 953450.58 frames. ], batch size: 23, lr: 3.38e-03, grad_scale: 32.0
+2023-04-27 10:17:43,691 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.341e+02 1.651e+02 1.954e+02 2.364e+02 4.876e+02, threshold=3.909e+02, percent-clipped=1.0
+2023-04-27 10:17:50,118 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95761.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 10:17:53,677 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2398, 1.3099, 3.8056, 3.5166, 3.3602, 3.6481, 3.6292, 3.3268],
+ device='cuda:0'), covar=tensor([0.7096, 0.5650, 0.1119, 0.1932, 0.1188, 0.1771, 0.1625, 0.1697],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0305, 0.0401, 0.0404, 0.0347, 0.0404, 0.0308, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:17:58,584 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7161, 1.1550, 1.7905, 2.2448, 1.8145, 1.7261, 1.7500, 1.7011],
+ device='cuda:0'), covar=tensor([0.4502, 0.6321, 0.5849, 0.5471, 0.5481, 0.7287, 0.7228, 0.7324],
+ device='cuda:0'), in_proj_covar=tensor([0.0422, 0.0408, 0.0498, 0.0505, 0.0450, 0.0475, 0.0482, 0.0486],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:18:09,896 INFO [finetune.py:976] (0/7) Epoch 17, batch 4150, loss[loss=0.1506, simple_loss=0.224, pruned_loss=0.03862, over 4731.00 frames. ], tot_loss[loss=0.183, simple_loss=0.2529, pruned_loss=0.05653, over 951803.01 frames. ], batch size: 27, lr: 3.38e-03, grad_scale: 32.0
+2023-04-27 10:18:10,605 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8969, 2.4691, 2.0849, 2.3327, 1.5054, 1.9577, 2.1806, 1.5966],
+ device='cuda:0'), covar=tensor([0.2303, 0.1219, 0.0831, 0.1119, 0.3643, 0.1228, 0.1798, 0.2688],
+ device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0305, 0.0218, 0.0278, 0.0311, 0.0258, 0.0249, 0.0265],
+ device='cuda:0'), out_proj_covar=tensor([1.1371e-04, 1.2135e-04, 8.6750e-05, 1.1014e-04, 1.2636e-04, 1.0266e-04,
+ 1.0053e-04, 1.0510e-04], device='cuda:0')
+2023-04-27 10:18:21,831 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=95809.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 10:18:24,188 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95812.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 10:18:43,505 INFO [finetune.py:976] (0/7) Epoch 17, batch 4200, loss[loss=0.2013, simple_loss=0.2654, pruned_loss=0.06857, over 4832.00 frames. ], tot_loss[loss=0.1827, simple_loss=0.2535, pruned_loss=0.056, over 954038.93 frames. ], batch size: 30, lr: 3.38e-03, grad_scale: 32.0
+2023-04-27 10:18:46,061 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95847.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:18:49,671 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.908e+01 1.602e+02 1.963e+02 2.412e+02 3.992e+02, threshold=3.927e+02, percent-clipped=2.0
+2023-04-27 10:19:03,553 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5699, 1.4257, 4.3667, 4.0623, 3.8360, 4.1737, 4.0397, 3.8625],
+ device='cuda:0'), covar=tensor([0.6855, 0.5617, 0.1104, 0.1685, 0.1158, 0.1614, 0.1298, 0.1428],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0306, 0.0401, 0.0405, 0.0347, 0.0405, 0.0309, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:19:04,176 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95873.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 10:19:16,331 INFO [finetune.py:976] (0/7) Epoch 17, batch 4250, loss[loss=0.1572, simple_loss=0.2292, pruned_loss=0.04259, over 4795.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2513, pruned_loss=0.05499, over 954693.21 frames. ], batch size: 29, lr: 3.38e-03, grad_scale: 32.0
+2023-04-27 10:19:26,032 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=95908.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 10:19:31,923 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95916.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:19:36,868 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95924.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:19:39,714 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8861, 1.4257, 1.4958, 1.5703, 2.0254, 1.6489, 1.2779, 1.4391],
+ device='cuda:0'), covar=tensor([0.1378, 0.1309, 0.1770, 0.1135, 0.0704, 0.1354, 0.1999, 0.1936],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0313, 0.0351, 0.0289, 0.0327, 0.0310, 0.0299, 0.0366],
+ device='cuda:0'), out_proj_covar=tensor([6.3915e-05, 6.5358e-05, 7.4754e-05, 5.8682e-05, 6.7986e-05, 6.5344e-05,
+ 6.3114e-05, 7.8062e-05], device='cuda:0')
+2023-04-27 10:19:48,685 INFO [finetune.py:976] (0/7) Epoch 17, batch 4300, loss[loss=0.1294, simple_loss=0.2056, pruned_loss=0.02657, over 4825.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2483, pruned_loss=0.05403, over 955708.11 frames. ], batch size: 39, lr: 3.38e-03, grad_scale: 32.0
+2023-04-27 10:19:50,247 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0
+2023-04-27 10:19:54,839 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.107e+02 1.623e+02 1.872e+02 2.208e+02 5.069e+02, threshold=3.743e+02, percent-clipped=1.0
+2023-04-27 10:20:03,621 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=95964.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:20:09,299 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=95972.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:20:09,895 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=95973.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:20:14,625 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=95980.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:20:22,491 INFO [finetune.py:976] (0/7) Epoch 17, batch 4350, loss[loss=0.1389, simple_loss=0.2033, pruned_loss=0.0372, over 4776.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.245, pruned_loss=0.05286, over 954640.61 frames. ], batch size: 28, lr: 3.38e-03, grad_scale: 32.0
+2023-04-27 10:20:23,781 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=95995.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:20:26,930 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-96000.pt
+2023-04-27 10:20:28,816 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-04-27 10:21:07,894 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96041.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:21:08,056 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0
+2023-04-27 10:21:08,979 INFO [finetune.py:976] (0/7) Epoch 17, batch 4400, loss[loss=0.2068, simple_loss=0.2607, pruned_loss=0.07645, over 4820.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2454, pruned_loss=0.0528, over 954467.84 frames. ], batch size: 30, lr: 3.38e-03, grad_scale: 32.0
+2023-04-27 10:21:09,038 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=96043.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:21:15,035 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.713e+01 1.567e+02 1.808e+02 2.181e+02 3.991e+02, threshold=3.617e+02, percent-clipped=1.0
+2023-04-27 10:21:23,114 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2957, 1.5866, 1.3997, 1.8156, 1.7890, 1.8985, 1.4428, 3.6282],
+ device='cuda:0'), covar=tensor([0.0583, 0.0805, 0.0829, 0.1151, 0.0595, 0.0534, 0.0773, 0.0123],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 10:21:34,785 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96080.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:21:48,839 INFO [finetune.py:976] (0/7) Epoch 17, batch 4450, loss[loss=0.2062, simple_loss=0.2749, pruned_loss=0.06875, over 4887.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2488, pruned_loss=0.054, over 955282.10 frames. ], batch size: 32, lr: 3.38e-03, grad_scale: 32.0
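checkpoint.py:75 fires every 2000 training batches, independently of epoch boundaries: checkpoint-96000.pt here, checkpoint-98000.pt further down in this log. A minimal sketch of that kind of batch-count-driven saving follows; the function name and saved fields are illustrative, not the recipe's actual helper.

import torch

def maybe_save_checkpoint(model, optimizer, batch_idx_train: int,
                          save_every_n: int = 2000,
                          exp_dir: str = "pruned_transducer_stateless7_streaming/exp2"):
    """Save a rolling checkpoint whenever the global batch counter hits a
    multiple of save_every_n (2000 matches the 96000/98000 spacing seen
    in this log)."""
    if batch_idx_train == 0 or batch_idx_train % save_every_n != 0:
        return
    path = f"{exp_dir}/checkpoint-{batch_idx_train}.pt"
    torch.save({"model": model.state_dict(),
                "optimizer": optimizer.state_dict(),
                "batch_idx_train": batch_idx_train}, path)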
+2023-04-27 10:22:11,872 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-04-27 10:22:53,799 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96141.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:22:54,919 INFO [finetune.py:976] (0/7) Epoch 17, batch 4500, loss[loss=0.1958, simple_loss=0.2773, pruned_loss=0.05719, over 4838.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2506, pruned_loss=0.05437, over 955382.30 frames. ], batch size: 49, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:23:02,754 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.142e+02 1.688e+02 1.990e+02 2.416e+02 4.917e+02, threshold=3.981e+02, percent-clipped=4.0
+2023-04-27 10:23:16,201 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96165.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:23:17,975 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96168.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 10:23:58,145 INFO [finetune.py:976] (0/7) Epoch 17, batch 4550, loss[loss=0.2236, simple_loss=0.2811, pruned_loss=0.08307, over 4903.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2516, pruned_loss=0.05435, over 956000.78 frames. ], batch size: 36, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:24:10,600 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96203.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 10:24:42,298 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96226.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:24:47,440 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8465, 1.4127, 1.9484, 2.3321, 1.9700, 1.8652, 1.9147, 1.8996],
+ device='cuda:0'), covar=tensor([0.4098, 0.6157, 0.5692, 0.5534, 0.5280, 0.7149, 0.7162, 0.8096],
+ device='cuda:0'), in_proj_covar=tensor([0.0420, 0.0407, 0.0497, 0.0504, 0.0449, 0.0475, 0.0481, 0.0486],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:24:52,260 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0
+2023-04-27 10:24:54,976 INFO [finetune.py:976] (0/7) Epoch 17, batch 4600, loss[loss=0.1627, simple_loss=0.235, pruned_loss=0.04523, over 4810.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2505, pruned_loss=0.05357, over 955376.95 frames. ], batch size: 41, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:25:01,052 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.153e+02 1.538e+02 1.819e+02 2.307e+02 5.540e+02, threshold=3.639e+02, percent-clipped=2.0
+2023-04-27 10:25:13,314 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96273.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:25:20,407 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-27 10:25:27,833 INFO [finetune.py:976] (0/7) Epoch 17, batch 4650, loss[loss=0.146, simple_loss=0.2222, pruned_loss=0.03489, over 4840.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2485, pruned_loss=0.05335, over 954842.78 frames. ], batch size: 49, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:25:45,382 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=96321.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:25:47,227 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96324.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:25:56,371 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96336.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:26:01,178 INFO [finetune.py:976] (0/7) Epoch 17, batch 4700, loss[loss=0.1761, simple_loss=0.2369, pruned_loss=0.05764, over 4937.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2452, pruned_loss=0.05242, over 957077.85 frames. ], batch size: 38, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:26:07,579 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.173e+02 1.665e+02 1.923e+02 2.344e+02 4.487e+02, threshold=3.847e+02, percent-clipped=4.0
+2023-04-27 10:26:13,640 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4715, 3.0869, 1.0515, 1.6174, 2.4890, 1.5521, 4.3289, 2.3978],
+ device='cuda:0'), covar=tensor([0.0681, 0.0696, 0.0861, 0.1360, 0.0515, 0.1034, 0.0232, 0.0583],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0049, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 10:26:38,814 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96385.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:26:48,037 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96390.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:26:55,841 INFO [finetune.py:976] (0/7) Epoch 17, batch 4750, loss[loss=0.1923, simple_loss=0.2641, pruned_loss=0.06023, over 4772.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2435, pruned_loss=0.05216, over 955514.11 frames. ], batch size: 28, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:27:44,797 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96436.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:27:50,989 INFO [finetune.py:976] (0/7) Epoch 17, batch 4800, loss[loss=0.218, simple_loss=0.2986, pruned_loss=0.06869, over 4813.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2459, pruned_loss=0.05308, over 952903.96 frames. ], batch size: 40, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:27:56,399 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96451.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:27:57,983 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.553e+02 2.009e+02 2.369e+02 4.346e+02, threshold=4.018e+02, percent-clipped=1.0
+2023-04-27 10:28:07,177 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96468.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 10:28:34,487 INFO [finetune.py:976] (0/7) Epoch 17, batch 4850, loss[loss=0.1818, simple_loss=0.2625, pruned_loss=0.05054, over 4813.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2484, pruned_loss=0.05405, over 952135.78 frames. ], batch size: 40, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:28:41,612 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96503.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:28:49,909 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=96516.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 10:28:52,916 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96521.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:29:07,105 INFO [finetune.py:976] (0/7) Epoch 17, batch 4900, loss[loss=0.1622, simple_loss=0.2261, pruned_loss=0.0491, over 4150.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2497, pruned_loss=0.05464, over 952149.26 frames. ], batch size: 17, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:29:13,473 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=96551.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:29:15,081 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.788e+01 1.547e+02 1.882e+02 2.260e+02 4.956e+02, threshold=3.763e+02, percent-clipped=3.0
+2023-04-27 10:29:39,870 INFO [finetune.py:976] (0/7) Epoch 17, batch 4950, loss[loss=0.174, simple_loss=0.2531, pruned_loss=0.04744, over 4904.00 frames. ], tot_loss[loss=0.181, simple_loss=0.2518, pruned_loss=0.05514, over 952386.61 frames. ], batch size: 36, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:29:57,223 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3106, 1.7653, 1.6803, 2.0489, 1.9520, 2.0750, 1.6432, 4.3538],
+ device='cuda:0'), covar=tensor([0.0550, 0.0748, 0.0775, 0.1157, 0.0607, 0.0494, 0.0736, 0.0099],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0039, 0.0038, 0.0038, 0.0056],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0012, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 10:30:02,110 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9850, 1.8177, 1.6533, 1.4597, 1.9825, 1.6948, 2.4153, 1.4752],
+ device='cuda:0'), covar=tensor([0.3375, 0.1904, 0.4695, 0.3028, 0.1593, 0.2172, 0.1272, 0.4436],
+ device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0346, 0.0426, 0.0355, 0.0380, 0.0379, 0.0369, 0.0417],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:30:09,388 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96636.0, num_to_drop=0, layers_to_drop=set()
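The zipformer.py:1188 records document stochastic layer dropping: each encoder stack has its own warmup interval in batches (warmup_begin/warmup_end), and on some batches one layer is skipped (num_to_drop, layers_to_drop); late in training, as here around batch 96000, drops are rare and layers_to_drop is usually empty. Below is a sketch of one plausible way to pick layers to drop. The probability schedule is an assumption for illustration only, not zipformer.py's actual rule.

import random

def pick_layers_to_drop(batch_count: float, warmup_end: float,
                        num_layers: int,
                        max_drop_prob: float = 0.075) -> set:
    """Randomly select encoder layers to skip for this batch.

    Hypothetical schedule: the per-layer drop probability decays once the
    stack is past its warmup interval, which is one way to reproduce the
    mostly-empty layers_to_drop=set() seen above at batch_count ~96000.
    """
    if batch_count < warmup_end:
        drop_prob = max_drop_prob                            # still warming up
    else:
        drop_prob = max_drop_prob * warmup_end / batch_count  # decay afterwards
    return {i for i in range(num_layers) if random.random() < drop_prob}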
+2023-04-27 10:30:13,517 INFO [finetune.py:976] (0/7) Epoch 17, batch 5000, loss[loss=0.1811, simple_loss=0.2493, pruned_loss=0.05642, over 4906.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2493, pruned_loss=0.05396, over 953253.13 frames. ], batch size: 37, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:30:21,474 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.257e+01 1.582e+02 1.781e+02 2.065e+02 3.108e+02, threshold=3.561e+02, percent-clipped=0.0
+2023-04-27 10:30:38,898 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96680.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:30:38,978 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1328, 1.7702, 2.0494, 2.1400, 2.0744, 1.7142, 1.2356, 1.7408],
+ device='cuda:0'), covar=tensor([0.3189, 0.2939, 0.1729, 0.2292, 0.2231, 0.2441, 0.4106, 0.1891],
+ device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0247, 0.0227, 0.0317, 0.0218, 0.0231, 0.0229, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 10:30:41,298 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=96684.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:30:46,661 INFO [finetune.py:976] (0/7) Epoch 17, batch 5050, loss[loss=0.1877, simple_loss=0.2629, pruned_loss=0.05631, over 4820.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2467, pruned_loss=0.05344, over 951661.99 frames. ], batch size: 39, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:31:15,656 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96736.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:31:19,859 INFO [finetune.py:976] (0/7) Epoch 17, batch 5100, loss[loss=0.1961, simple_loss=0.2589, pruned_loss=0.06667, over 4920.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2449, pruned_loss=0.05297, over 952628.25 frames. ], batch size: 37, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:31:21,738 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=96746.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:31:25,901 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.147e+02 1.614e+02 1.893e+02 2.231e+02 4.752e+02, threshold=3.786e+02, percent-clipped=4.0
+2023-04-27 10:31:41,614 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5282, 2.6113, 2.1622, 2.2706, 2.6327, 2.3046, 3.5535, 1.9891],
+ device='cuda:0'), covar=tensor([0.3774, 0.2283, 0.4720, 0.3359, 0.1844, 0.2746, 0.1384, 0.4320],
+ device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0346, 0.0426, 0.0355, 0.0379, 0.0379, 0.0369, 0.0417],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:31:54,175 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=96776.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:32:04,478 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=96784.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:32:07,604 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2244, 1.6076, 1.5560, 2.0991, 2.2877, 1.9103, 1.9030, 1.6628],
+ device='cuda:0'), covar=tensor([0.2160, 0.1847, 0.1920, 0.1400, 0.1360, 0.2096, 0.2219, 0.2094],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0310, 0.0350, 0.0287, 0.0326, 0.0308, 0.0298, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([6.3220e-05, 6.4674e-05, 7.4479e-05, 5.8227e-05, 6.7847e-05, 6.4803e-05,
+ 6.2826e-05, 7.7684e-05], device='cuda:0')
+2023-04-27 10:32:15,342 INFO [finetune.py:976] (0/7) Epoch 17, batch 5150, loss[loss=0.162, simple_loss=0.217, pruned_loss=0.0535, over 4383.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2437, pruned_loss=0.05269, over 951984.95 frames. ], batch size: 19, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:32:46,400 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96821.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:32:58,342 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5516, 1.1022, 1.6667, 2.0473, 1.6849, 1.5657, 1.6233, 1.6317],
+ device='cuda:0'), covar=tensor([0.3798, 0.5736, 0.4838, 0.4775, 0.4922, 0.6709, 0.6229, 0.6432],
+ device='cuda:0'), in_proj_covar=tensor([0.0419, 0.0407, 0.0496, 0.0503, 0.0449, 0.0474, 0.0481, 0.0483],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:33:03,928 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=96837.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:33:12,851 INFO [finetune.py:976] (0/7) Epoch 17, batch 5200, loss[loss=0.2056, simple_loss=0.2739, pruned_loss=0.06862, over 4769.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2464, pruned_loss=0.05344, over 953971.05 frames. ], batch size: 54, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:33:15,440 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2033, 1.4487, 1.3703, 1.6372, 1.6445, 1.9402, 1.3195, 3.5145],
+ device='cuda:0'), covar=tensor([0.0594, 0.0801, 0.0790, 0.1170, 0.0606, 0.0539, 0.0781, 0.0134],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 10:33:24,746 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.802e+01 1.740e+02 2.121e+02 2.596e+02 7.000e+02, threshold=4.242e+02, percent-clipped=4.0
+2023-04-27 10:33:47,094 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=96869.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:34:02,258 INFO [finetune.py:976] (0/7) Epoch 17, batch 5250, loss[loss=0.2092, simple_loss=0.2779, pruned_loss=0.07023, over 4893.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2479, pruned_loss=0.05381, over 953608.62 frames. ], batch size: 43, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:34:25,966 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8000, 3.6514, 2.7418, 4.3893, 3.7792, 3.7894, 1.7765, 3.8413],
+ device='cuda:0'), covar=tensor([0.1723, 0.1189, 0.3280, 0.1604, 0.3132, 0.1775, 0.5695, 0.2450],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0213, 0.0249, 0.0306, 0.0298, 0.0249, 0.0273, 0.0272],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 10:34:35,558 INFO [finetune.py:976] (0/7) Epoch 17, batch 5300, loss[loss=0.1824, simple_loss=0.2605, pruned_loss=0.0521, over 4814.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2501, pruned_loss=0.05416, over 953734.67 frames. ], batch size: 40, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:34:38,191 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.12 vs. limit=5.0
+2023-04-27 10:34:41,676 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.524e+02 1.845e+02 2.298e+02 4.151e+02, threshold=3.690e+02, percent-clipped=0.0
+2023-04-27 10:35:00,592 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=96980.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:35:08,783 INFO [finetune.py:976] (0/7) Epoch 17, batch 5350, loss[loss=0.1982, simple_loss=0.2645, pruned_loss=0.06595, over 4920.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.251, pruned_loss=0.0542, over 955833.11 frames. ], batch size: 37, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:35:13,314 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97000.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:35:33,059 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=97028.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:35:42,642 INFO [finetune.py:976] (0/7) Epoch 17, batch 5400, loss[loss=0.1362, simple_loss=0.1991, pruned_loss=0.03672, over 4716.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2495, pruned_loss=0.05445, over 954195.28 frames. ], batch size: 23, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:35:44,573 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97046.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:35:45,309 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.98 vs. limit=5.0
+2023-04-27 10:35:48,747 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.116e+02 1.600e+02 1.805e+02 2.105e+02 4.244e+02, threshold=3.609e+02, percent-clipped=1.0
+2023-04-27 10:35:53,791 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97061.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:36:15,423 INFO [finetune.py:976] (0/7) Epoch 17, batch 5450, loss[loss=0.2102, simple_loss=0.2568, pruned_loss=0.08178, over 4198.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.246, pruned_loss=0.05361, over 954455.14 frames. ], batch size: 65, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:36:16,112 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=97094.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:36:40,627 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97132.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:36:47,755 INFO [finetune.py:976] (0/7) Epoch 17, batch 5500, loss[loss=0.1833, simple_loss=0.2643, pruned_loss=0.05114, over 4901.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2427, pruned_loss=0.05249, over 954318.13 frames. ], batch size: 35, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:36:54,340 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.627e+02 1.973e+02 2.284e+02 4.082e+02, threshold=3.945e+02, percent-clipped=1.0
+2023-04-27 10:37:37,879 INFO [finetune.py:976] (0/7) Epoch 17, batch 5550, loss[loss=0.205, simple_loss=0.275, pruned_loss=0.06749, over 4894.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2437, pruned_loss=0.05236, over 951368.61 frames. ], batch size: 37, lr: 3.37e-03, grad_scale: 32.0
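The scaling.py:679 records are a whitening diagnostic: activations are split into num_groups groups of channels, and metric measures how anisotropic each group's covariance is (1.0 would be perfectly white); a line is logged when the metric approaches or exceeds limit. Below is a sketch of one such anisotropy measure. The formula is an assumed definition for illustration, not necessarily scaling.py's exact one.

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> float:
    """Anisotropy of the feature covariance, computed per channel group.

    Assumed definition: with eigenvalues l_i of the covariance, the metric
    is mean(l_i^2) / mean(l_i)^2, which is 1.0 for a perfectly white
    (isotropic) distribution and grows as a few directions dominate.
    x: (num_frames, num_channels), num_channels divisible by num_groups.
    """
    num_frames, num_channels = x.shape
    x = x.reshape(num_frames, num_groups, num_channels // num_groups)
    x = x - x.mean(dim=0, keepdim=True)
    metrics = []
    for g in range(num_groups):
        cov = x[:, g, :].t() @ x[:, g, :] / num_frames
        l = torch.linalg.eigvalsh(cov)
        metrics.append(((l ** 2).mean() / l.mean() ** 2).item())
    return max(metrics)

# whitening_metric(torch.randn(1000, 96), num_groups=8) comes out close to
# 1.1, comfortably under the limit=2.0 used for the 96-channel modules above.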
+2023-04-27 10:38:15,509 INFO [finetune.py:976] (0/7) Epoch 17, batch 5600, loss[loss=0.1943, simple_loss=0.2641, pruned_loss=0.06224, over 4776.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2475, pruned_loss=0.05358, over 952361.08 frames. ], batch size: 26, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:38:26,564 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.758e+02 2.119e+02 2.585e+02 7.806e+02, threshold=4.239e+02, percent-clipped=5.0
+2023-04-27 10:38:37,064 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97260.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:39:14,493 INFO [finetune.py:976] (0/7) Epoch 17, batch 5650, loss[loss=0.1797, simple_loss=0.2484, pruned_loss=0.05553, over 4891.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2502, pruned_loss=0.05428, over 952833.64 frames. ], batch size: 32, lr: 3.37e-03, grad_scale: 32.0
+2023-04-27 10:39:24,618 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.4710, 1.3287, 1.3590, 1.0659, 1.3761, 1.1477, 1.7338, 1.2600],
+ device='cuda:0'), covar=tensor([0.3221, 0.1662, 0.4897, 0.2574, 0.1389, 0.2132, 0.1491, 0.4792],
+ device='cuda:0'), in_proj_covar=tensor([0.0334, 0.0342, 0.0422, 0.0351, 0.0377, 0.0376, 0.0367, 0.0414],
+ device='cuda:0'), out_proj_covar=tensor([9.9846e-05, 1.0302e-04, 1.2852e-04, 1.0611e-04, 1.1276e-04, 1.1263e-04,
+ 1.0809e-04, 1.2567e-04], device='cuda:0')
+2023-04-27 10:39:47,532 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97321.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:40:09,459 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-27 10:40:16,420 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97342.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:40:16,914 INFO [finetune.py:976] (0/7) Epoch 17, batch 5700, loss[loss=0.1468, simple_loss=0.2088, pruned_loss=0.04238, over 4217.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2455, pruned_loss=0.05341, over 934678.64 frames. ], batch size: 18, lr: 3.36e-03, grad_scale: 32.0
+2023-04-27 10:40:19,383 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97347.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:40:22,861 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.291e+01 1.370e+02 1.630e+02 1.988e+02 3.241e+02, threshold=3.261e+02, percent-clipped=0.0
+2023-04-27 10:40:24,680 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97356.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:40:33,781 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-17.pt
+2023-04-27 10:40:46,576 INFO [finetune.py:976] (0/7) Epoch 18, batch 0, loss[loss=0.2069, simple_loss=0.2795, pruned_loss=0.06711, over 4878.00 frames. ], tot_loss[loss=0.2069, simple_loss=0.2795, pruned_loss=0.06711, over 4878.00 frames. ], batch size: 35, lr: 3.36e-03, grad_scale: 64.0
+2023-04-27 10:40:46,577 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 10:41:03,167 INFO [finetune.py:1010] (0/7) Epoch 18, validation: loss=0.1537, simple_loss=0.225, pruned_loss=0.04121, over 2265189.00 frames.
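At each epoch boundary the recipe saves epoch-17.pt, then pauses training (finetune.py:1001/1010), scores the full dev set of 2265189 frames with the same criterion, and logs the result before batch 0 of the new epoch; here the validation loss is 0.1537. A minimal sketch of such a validation pass follows, with compute_loss standing in for the recipe's actual loss function (assumed signature: compute_loss(model, batch) -> (loss, num_frames)).

import torch

def compute_validation_loss(model, valid_dl, compute_loss) -> float:
    """Average the training criterion over the dev dataloader, weighting
    by frame counts the way the 'over N frames' figures in this log do."""
    model.eval()
    tot_loss, tot_frames = 0.0, 0.0
    with torch.no_grad():
        for batch in valid_dl:
            loss, num_frames = compute_loss(model, batch)
            tot_loss += loss.item() * num_frames
            tot_frames += num_frames
    model.train()
    return tot_loss / max(1.0, tot_frames)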
+2023-04-27 10:41:03,168 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 10:41:05,488 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97374.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:41:28,962 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97403.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 10:41:32,013 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97408.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:41:40,835 INFO [finetune.py:976] (0/7) Epoch 18, batch 50, loss[loss=0.2015, simple_loss=0.2795, pruned_loss=0.0617, over 4812.00 frames. ], tot_loss[loss=0.1802, simple_loss=0.2524, pruned_loss=0.05404, over 214081.10 frames. ], batch size: 39, lr: 3.36e-03, grad_scale: 64.0
+2023-04-27 10:41:49,629 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97432.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:41:51,454 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97435.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:42:02,221 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.680e+01 1.493e+02 1.785e+02 2.115e+02 3.636e+02, threshold=3.569e+02, percent-clipped=3.0
+2023-04-27 10:42:14,117 INFO [finetune.py:976] (0/7) Epoch 18, batch 100, loss[loss=0.1737, simple_loss=0.2399, pruned_loss=0.05372, over 4748.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2463, pruned_loss=0.05386, over 378431.69 frames. ], batch size: 59, lr: 3.36e-03, grad_scale: 64.0
+2023-04-27 10:42:22,015 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=97480.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:42:23,951 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.36 vs. limit=5.0
+2023-04-27 10:42:25,738 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.51 vs. limit=5.0
+2023-04-27 10:42:30,671 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-04-27 10:42:47,723 INFO [finetune.py:976] (0/7) Epoch 18, batch 150, loss[loss=0.1731, simple_loss=0.2433, pruned_loss=0.05145, over 4762.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2428, pruned_loss=0.05298, over 507769.31 frames. ], batch size: 26, lr: 3.36e-03, grad_scale: 64.0
+2023-04-27 10:43:03,120 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6817, 3.6564, 2.6771, 4.3649, 3.8281, 3.7053, 1.5042, 3.7644],
+ device='cuda:0'), covar=tensor([0.1914, 0.1322, 0.3457, 0.1617, 0.3120, 0.2007, 0.6353, 0.2288],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0212, 0.0247, 0.0303, 0.0297, 0.0249, 0.0272, 0.0271],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 10:43:08,512 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.668e+02 1.928e+02 2.305e+02 4.422e+02, threshold=3.856e+02, percent-clipped=2.0
+2023-04-27 10:43:09,854 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97555.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:43:19,854 INFO [finetune.py:976] (0/7) Epoch 18, batch 200, loss[loss=0.2126, simple_loss=0.2735, pruned_loss=0.07588, over 4108.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2427, pruned_loss=0.05425, over 607762.67 frames. ], batch size: 65, lr: 3.36e-03, grad_scale: 64.0
+2023-04-27 10:43:32,233 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0474, 2.4953, 0.9110, 1.4614, 1.9566, 1.2505, 3.3178, 1.8552],
+ device='cuda:0'), covar=tensor([0.0699, 0.0591, 0.0849, 0.1323, 0.0510, 0.1072, 0.0214, 0.0632],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0074, 0.0052],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 10:43:55,460 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97616.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:43:55,518 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97616.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:43:58,890 INFO [finetune.py:976] (0/7) Epoch 18, batch 250, loss[loss=0.171, simple_loss=0.2499, pruned_loss=0.04609, over 4899.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.246, pruned_loss=0.05473, over 684740.62 frames. ], batch size: 35, lr: 3.36e-03, grad_scale: 64.0
+2023-04-27 10:44:22,346 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5216, 3.0623, 1.0933, 1.9199, 1.7556, 2.3945, 1.8498, 1.2507],
+ device='cuda:0'), covar=tensor([0.1183, 0.0820, 0.1734, 0.1043, 0.0982, 0.0834, 0.1303, 0.1826],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0243, 0.0137, 0.0121, 0.0133, 0.0153, 0.0118, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 10:44:29,797 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4478, 2.8850, 0.9670, 1.6881, 2.3067, 1.5712, 4.1864, 2.1436],
+ device='cuda:0'), covar=tensor([0.0647, 0.0792, 0.0940, 0.1287, 0.0506, 0.1000, 0.0253, 0.0605],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0046, 0.0050, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 10:44:42,422 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.696e+02 2.035e+02 2.379e+02 5.416e+02, threshold=4.070e+02, percent-clipped=1.0
+2023-04-27 10:44:49,677 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97656.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:45:03,747 INFO [finetune.py:976] (0/7) Epoch 18, batch 300, loss[loss=0.1899, simple_loss=0.264, pruned_loss=0.05787, over 4826.00 frames. ], tot_loss[loss=0.1813, simple_loss=0.2503, pruned_loss=0.05612, over 742234.07 frames. ], batch size: 33, lr: 3.36e-03, grad_scale: 64.0
+2023-04-27 10:45:09,976 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97674.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 10:45:43,984 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97698.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 10:45:47,045 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97703.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:45:47,652 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=97704.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:46:04,742 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7347, 1.3848, 1.8371, 2.2363, 1.8148, 1.6520, 1.7268, 1.7292],
+ device='cuda:0'), covar=tensor([0.4893, 0.7216, 0.7057, 0.6053, 0.6468, 0.8842, 0.8736, 1.0089],
+ device='cuda:0'), in_proj_covar=tensor([0.0421, 0.0407, 0.0497, 0.0504, 0.0450, 0.0477, 0.0483, 0.0487],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:46:07,659 INFO [finetune.py:976] (0/7) Epoch 18, batch 350, loss[loss=0.2004, simple_loss=0.2708, pruned_loss=0.06494, over 4914.00 frames. ], tot_loss[loss=0.1825, simple_loss=0.2516, pruned_loss=0.05671, over 788081.45 frames. ], batch size: 36, lr: 3.36e-03, grad_scale: 64.0
+2023-04-27 10:46:18,776 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97730.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:46:28,136 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97735.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 10:46:51,633 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.284e+02 1.687e+02 1.979e+02 2.374e+02 3.417e+02, threshold=3.957e+02, percent-clipped=0.0
+2023-04-27 10:47:08,342 INFO [finetune.py:976] (0/7) Epoch 18, batch 400, loss[loss=0.1775, simple_loss=0.2454, pruned_loss=0.0548, over 4908.00 frames. ], tot_loss[loss=0.1823, simple_loss=0.2522, pruned_loss=0.05626, over 826920.19 frames. ], batch size: 36, lr: 3.36e-03, grad_scale: 64.0
+2023-04-27 10:47:32,315 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6068, 3.5843, 2.6496, 4.2145, 3.6955, 3.6185, 1.6787, 3.5993],
+ device='cuda:0'), covar=tensor([0.1774, 0.1148, 0.2823, 0.1681, 0.2878, 0.1835, 0.5211, 0.2298],
+ device='cuda:0'), in_proj_covar=tensor([0.0236, 0.0207, 0.0242, 0.0296, 0.0290, 0.0243, 0.0265, 0.0264],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
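grad_scale is the loss-scaling factor of fp16 mixed-precision training; it sits at 64.0 at the start of epoch 18 and falls back to 32.0 a few hundred batches later when overflows are detected. Below is the standard PyTorch pattern that produces exactly this grow/shrink behaviour, shown as a generic sketch (criterion and the surrounding loop are placeholders, not the recipe's exact code).

import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0)  # grows/shrinks like grad_scale above

def train_step(model, optimizer, batch, criterion):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():   # fp16 forward pass
        loss = criterion(model, batch)
    scaler.scale(loss).backward()     # backward on the scaled loss
    scaler.step(optimizer)            # skips the step if gradients overflowed
    scaler.update()                   # halves the scale on overflow, grows it otherwise
    return loss.detach()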
+2023-04-27 10:47:42,004 INFO [finetune.py:976] (0/7) Epoch 18, batch 450, loss[loss=0.1838, simple_loss=0.2468, pruned_loss=0.06041, over 4822.00 frames. ], tot_loss[loss=0.1806, simple_loss=0.2503, pruned_loss=0.05544, over 853710.86 frames. ], batch size: 41, lr: 3.36e-03, grad_scale: 32.0
+2023-04-27 10:47:42,716 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5932, 2.9545, 1.0195, 1.7804, 2.1921, 1.5138, 4.1077, 2.3555],
+ device='cuda:0'), covar=tensor([0.0594, 0.0725, 0.0887, 0.1261, 0.0506, 0.0975, 0.0208, 0.0538],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0074, 0.0052],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 10:48:05,039 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.551e+02 1.835e+02 2.204e+02 3.781e+02, threshold=3.670e+02, percent-clipped=0.0
+2023-04-27 10:48:15,396 INFO [finetune.py:976] (0/7) Epoch 18, batch 500, loss[loss=0.1498, simple_loss=0.2205, pruned_loss=0.03958, over 4091.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2479, pruned_loss=0.05426, over 876425.82 frames. ], batch size: 65, lr: 3.36e-03, grad_scale: 32.0
+2023-04-27 10:48:26,738 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=97888.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:48:42,843 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=97911.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:48:45,958 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97916.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:48:48,920 INFO [finetune.py:976] (0/7) Epoch 18, batch 550, loss[loss=0.1638, simple_loss=0.2381, pruned_loss=0.04472, over 4830.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2447, pruned_loss=0.0531, over 895541.73 frames. ], batch size: 51, lr: 3.36e-03, grad_scale: 32.0
+2023-04-27 10:48:59,955 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6858, 3.4860, 2.8786, 3.1293, 2.4985, 3.0271, 3.0056, 2.3135],
+ device='cuda:0'), covar=tensor([0.2181, 0.1095, 0.0752, 0.1329, 0.3066, 0.1169, 0.1942, 0.3019],
+ device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0306, 0.0220, 0.0280, 0.0313, 0.0259, 0.0251, 0.0265],
+ device='cuda:0'), out_proj_covar=tensor([1.1482e-04, 1.2144e-04, 8.7295e-05, 1.1117e-04, 1.2719e-04, 1.0311e-04,
+ 1.0138e-04, 1.0540e-04], device='cuda:0')
+2023-04-27 10:49:09,120 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6586, 1.6288, 0.6502, 1.3953, 1.7627, 1.5495, 1.4493, 1.5062],
+ device='cuda:0'), covar=tensor([0.0486, 0.0376, 0.0372, 0.0550, 0.0263, 0.0508, 0.0498, 0.0546],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 10:49:09,147 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=97949.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:49:12,032 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.221e+01 1.571e+02 1.914e+02 2.299e+02 4.493e+02, threshold=3.828e+02, percent-clipped=2.0
+2023-04-27 10:49:18,164 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=97964.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:49:22,479 INFO [finetune.py:976] (0/7) Epoch 18, batch 600, loss[loss=0.192, simple_loss=0.2692, pruned_loss=0.05741, over 4930.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2437, pruned_loss=0.0522, over 911074.12 frames. ], batch size: 38, lr: 3.36e-03, grad_scale: 32.0
+2023-04-27 10:49:23,138 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9733, 2.3717, 0.8536, 1.2077, 1.6679, 1.1063, 3.2877, 1.5341],
+ device='cuda:0'), covar=tensor([0.0883, 0.0956, 0.1049, 0.1704, 0.0719, 0.1441, 0.0342, 0.0949],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0049, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 10:49:35,800 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8737, 1.7405, 1.6420, 1.4569, 1.8973, 1.5670, 2.3969, 1.4550],
+ device='cuda:0'), covar=tensor([0.3626, 0.2053, 0.5466, 0.3229, 0.1692, 0.2431, 0.1365, 0.4901],
+ device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0346, 0.0427, 0.0356, 0.0381, 0.0379, 0.0371, 0.0418],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:49:41,020 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=97998.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:49:43,261 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-98000.pt
+2023-04-27 10:49:46,916 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98003.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:49:57,854 INFO [finetune.py:976] (0/7) Epoch 18, batch 650, loss[loss=0.2268, simple_loss=0.2992, pruned_loss=0.07717, over 4841.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.246, pruned_loss=0.05235, over 921975.41 frames. ], batch size: 47, lr: 3.36e-03, grad_scale: 32.0
+2023-04-27 10:50:03,543 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98030.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 10:50:03,575 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98030.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:50:14,375 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=98046.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:50:17,918 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=98051.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:50:26,531 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.729e+02 2.021e+02 2.442e+02 4.739e+02, threshold=4.043e+02, percent-clipped=3.0
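The zipformer.py:2441 dumps report per-head entropies of the attention weights (one value per head, eight heads here), together with covariance summaries of the projection activations; persistently low entropy flags heads that have collapsed onto a few positions. A sketch of the per-head entropy computation follows, assuming attn_weights shaped (num_heads, query_len, key_len); the exact tensors zipformer.py averages over are not reproduced here.

import torch

def attention_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    """Mean entropy of each head's attention distribution.

    attn_weights: (num_heads, tgt_len, src_len), rows summing to 1.
    Returns one entropy per head, comparable to the 8-value tensors above.
    """
    eps = 1.0e-20
    ent = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return ent.mean(dim=-1)  # average over query positions

# attention_entropy(torch.softmax(torch.randn(8, 50, 50), dim=-1))
# -> a tensor of 8 entropies, one per head.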
+2023-04-27 10:50:48,619 INFO [finetune.py:976] (0/7) Epoch 18, batch 700, loss[loss=0.1449, simple_loss=0.214, pruned_loss=0.03788, over 4678.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.248, pruned_loss=0.05323, over 929055.33 frames. ], batch size: 23, lr: 3.36e-03, grad_scale: 32.0
+2023-04-27 10:50:58,802 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=98078.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:51:25,132 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4709, 3.2008, 2.5313, 2.9303, 2.1797, 2.6594, 2.7721, 1.9020],
+ device='cuda:0'), covar=tensor([0.2294, 0.1242, 0.0873, 0.1348, 0.3683, 0.1403, 0.2078, 0.3198],
+ device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0307, 0.0220, 0.0282, 0.0315, 0.0261, 0.0251, 0.0267],
+ device='cuda:0'), out_proj_covar=tensor([1.1536e-04, 1.2191e-04, 8.7647e-05, 1.1178e-04, 1.2787e-04, 1.0353e-04,
+ 1.0175e-04, 1.0598e-04], device='cuda:0')
+2023-04-27 10:51:33,383 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98106.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:51:47,602 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0
+2023-04-27 10:51:54,812 INFO [finetune.py:976] (0/7) Epoch 18, batch 750, loss[loss=0.1845, simple_loss=0.2539, pruned_loss=0.05754, over 4800.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2484, pruned_loss=0.05299, over 931072.77 frames. ], batch size: 25, lr: 3.36e-03, grad_scale: 32.0
+2023-04-27 10:51:57,992 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2308, 2.7427, 2.2564, 2.6142, 1.8981, 2.3430, 2.2210, 1.7234],
+ device='cuda:0'), covar=tensor([0.1870, 0.1000, 0.0785, 0.1163, 0.3004, 0.1198, 0.1973, 0.2653],
+ device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0305, 0.0220, 0.0280, 0.0313, 0.0260, 0.0250, 0.0266],
+ device='cuda:0'), out_proj_covar=tensor([1.1482e-04, 1.2131e-04, 8.7271e-05, 1.1129e-04, 1.2717e-04, 1.0315e-04,
+ 1.0128e-04, 1.0551e-04], device='cuda:0')
+2023-04-27 10:52:31,990 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.562e+02 1.753e+02 1.961e+02 3.537e+02, threshold=3.506e+02, percent-clipped=0.0
+2023-04-27 10:52:42,184 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98167.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:52:44,510 INFO [finetune.py:976] (0/7) Epoch 18, batch 800, loss[loss=0.1592, simple_loss=0.237, pruned_loss=0.04063, over 4787.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2495, pruned_loss=0.05339, over 936615.39 frames. ], batch size: 25, lr: 3.36e-03, grad_scale: 32.0
+2023-04-27 10:53:08,792 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0
+2023-04-27 10:53:10,395 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98211.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:53:17,963 INFO [finetune.py:976] (0/7) Epoch 18, batch 850, loss[loss=0.1427, simple_loss=0.2175, pruned_loss=0.03396, over 4771.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2466, pruned_loss=0.05208, over 939079.38 frames. ], batch size: 28, lr: 3.35e-03, grad_scale: 32.0
+2023-04-27 10:53:24,889 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-04-27 10:53:32,031 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98244.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:53:38,451 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.063e+02 1.514e+02 1.837e+02 2.117e+02 3.312e+02, threshold=3.674e+02, percent-clipped=0.0
+2023-04-27 10:53:42,070 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=98259.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:53:51,234 INFO [finetune.py:976] (0/7) Epoch 18, batch 900, loss[loss=0.1845, simple_loss=0.2479, pruned_loss=0.06048, over 4865.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2446, pruned_loss=0.05158, over 944622.27 frames. ], batch size: 34, lr: 3.35e-03, grad_scale: 32.0
+2023-04-27 10:54:10,932 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2959, 2.8386, 2.1728, 2.3127, 1.6176, 1.5736, 2.4483, 1.5516],
+ device='cuda:0'), covar=tensor([0.1520, 0.1467, 0.1352, 0.1567, 0.2180, 0.1846, 0.0881, 0.1940],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0212, 0.0168, 0.0204, 0.0199, 0.0183, 0.0156, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 10:54:24,149 INFO [finetune.py:976] (0/7) Epoch 18, batch 950, loss[loss=0.165, simple_loss=0.234, pruned_loss=0.04804, over 4829.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2437, pruned_loss=0.05204, over 946155.90 frames. ], batch size: 33, lr: 3.35e-03, grad_scale: 32.0
+2023-04-27 10:54:30,318 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98330.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 10:54:38,324 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5626, 0.6674, 1.5138, 1.8941, 1.6196, 1.5012, 1.4860, 1.5368],
+ device='cuda:0'), covar=tensor([0.4257, 0.6300, 0.5555, 0.5758, 0.5216, 0.7267, 0.7074, 0.7334],
+ device='cuda:0'), in_proj_covar=tensor([0.0420, 0.0407, 0.0495, 0.0502, 0.0449, 0.0476, 0.0484, 0.0487],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 10:54:44,940 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.018e+02 1.562e+02 1.785e+02 2.020e+02 3.278e+02, threshold=3.570e+02, percent-clipped=0.0
+2023-04-27 10:54:49,419 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0
+2023-04-27 10:54:57,903 INFO [finetune.py:976] (0/7) Epoch 18, batch 1000, loss[loss=0.1412, simple_loss=0.2164, pruned_loss=0.033, over 4834.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2451, pruned_loss=0.0525, over 947851.38 frames. ], batch size: 25, lr: 3.35e-03, grad_scale: 32.0
+2023-04-27 10:55:02,792 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=98378.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 10:55:10,728 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6709, 2.1479, 1.6426, 1.5660, 1.1889, 1.2268, 1.6882, 1.1533],
+ device='cuda:0'), covar=tensor([0.1710, 0.1412, 0.1556, 0.1849, 0.2376, 0.2065, 0.1037, 0.2120],
+ device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0213, 0.0168, 0.0205, 0.0200, 0.0184, 0.0156, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 10:55:22,430 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98410.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:55:29,958 INFO [finetune.py:976] (0/7) Epoch 18, batch 1050, loss[loss=0.1791, simple_loss=0.2608, pruned_loss=0.04868, over 4927.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2488, pruned_loss=0.0533, over 949901.40 frames. ], batch size: 33, lr: 3.35e-03, grad_scale: 32.0
+2023-04-27 10:55:52,030 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.114e+02 1.610e+02 1.888e+02 2.252e+02 4.818e+02, threshold=3.776e+02, percent-clipped=3.0
+2023-04-27 10:55:56,958 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98462.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:55:57,694 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0
+2023-04-27 10:56:08,980 INFO [finetune.py:976] (0/7) Epoch 18, batch 1100, loss[loss=0.2178, simple_loss=0.2814, pruned_loss=0.07714, over 4816.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2509, pruned_loss=0.054, over 952112.00 frames. ], batch size: 38, lr: 3.35e-03, grad_scale: 32.0
+2023-04-27 10:56:09,119 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98471.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:56:29,428 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-04-27 10:57:13,273 INFO [finetune.py:976] (0/7) Epoch 18, batch 1150, loss[loss=0.1403, simple_loss=0.2205, pruned_loss=0.03005, over 4849.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2507, pruned_loss=0.05389, over 952834.84 frames. ], batch size: 49, lr: 3.35e-03, grad_scale: 32.0
+2023-04-27 10:57:45,497 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98543.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:57:46,070 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98544.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 10:57:57,924 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.919e+01 1.656e+02 1.874e+02 2.209e+02 6.337e+02, threshold=3.749e+02, percent-clipped=3.0
+2023-04-27 10:58:14,178 INFO [finetune.py:976] (0/7) Epoch 18, batch 1200, loss[loss=0.1935, simple_loss=0.264, pruned_loss=0.06152, over 4909.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2494, pruned_loss=0.0536, over 952413.53 frames. ], batch size: 36, lr: 3.35e-03, grad_scale: 32.0
limit=5.0 +2023-04-27 10:58:28,909 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=98592.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:58:36,414 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98604.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 10:58:41,414 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-27 10:58:46,157 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5104, 1.8955, 1.8449, 2.1812, 2.0553, 2.2223, 1.7961, 3.6683], + device='cuda:0'), covar=tensor([0.0520, 0.0648, 0.0677, 0.0920, 0.0518, 0.0421, 0.0604, 0.0176], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 10:58:46,674 INFO [finetune.py:976] (0/7) Epoch 18, batch 1250, loss[loss=0.1662, simple_loss=0.2174, pruned_loss=0.05743, over 4163.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2466, pruned_loss=0.05281, over 952698.43 frames. ], batch size: 17, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 10:59:10,194 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.558e+02 1.895e+02 2.216e+02 4.222e+02, threshold=3.789e+02, percent-clipped=2.0 +2023-04-27 10:59:20,473 INFO [finetune.py:976] (0/7) Epoch 18, batch 1300, loss[loss=0.1393, simple_loss=0.2202, pruned_loss=0.02924, over 4903.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2434, pruned_loss=0.05164, over 954951.98 frames. ], batch size: 32, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 10:59:44,649 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7102, 1.5089, 1.6324, 1.9890, 1.9654, 1.6065, 1.4210, 1.8889], + device='cuda:0'), covar=tensor([0.0741, 0.1044, 0.0640, 0.0480, 0.0553, 0.0777, 0.0688, 0.0484], + device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0198, 0.0179, 0.0170, 0.0175, 0.0179, 0.0149, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 10:59:53,702 INFO [finetune.py:976] (0/7) Epoch 18, batch 1350, loss[loss=0.1994, simple_loss=0.2512, pruned_loss=0.07382, over 4813.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2422, pruned_loss=0.05155, over 952102.61 frames. 
], batch size: 45, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:00:17,035 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.199e+02 1.614e+02 2.005e+02 2.367e+02 4.015e+02, threshold=4.011e+02, percent-clipped=1.0 +2023-04-27 11:00:20,904 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7820, 1.4482, 1.9556, 2.2712, 1.8266, 1.7773, 1.8542, 1.7764], + device='cuda:0'), covar=tensor([0.4724, 0.6793, 0.6317, 0.5666, 0.5960, 0.8302, 0.8438, 0.9097], + device='cuda:0'), in_proj_covar=tensor([0.0421, 0.0406, 0.0495, 0.0501, 0.0450, 0.0476, 0.0482, 0.0487], + device='cuda:0'), out_proj_covar=tensor([1.0140e-04, 9.9912e-05, 1.1140e-04, 1.1969e-04, 1.0783e-04, 1.1444e-04, + 1.1448e-04, 1.1481e-04], device='cuda:0') +2023-04-27 11:00:22,044 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=98762.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:00:24,428 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98766.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:00:27,468 INFO [finetune.py:976] (0/7) Epoch 18, batch 1400, loss[loss=0.1911, simple_loss=0.2711, pruned_loss=0.05559, over 4762.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2468, pruned_loss=0.05293, over 951862.22 frames. ], batch size: 54, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:00:54,322 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=98810.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:00:59,816 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1256, 1.4177, 1.3413, 1.7003, 1.5520, 1.5952, 1.3796, 2.4818], + device='cuda:0'), covar=tensor([0.0572, 0.0803, 0.0779, 0.1127, 0.0612, 0.0489, 0.0763, 0.0211], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 11:01:00,975 INFO [finetune.py:976] (0/7) Epoch 18, batch 1450, loss[loss=0.1741, simple_loss=0.2504, pruned_loss=0.04891, over 4896.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2486, pruned_loss=0.05295, over 954032.54 frames. 
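In every Clipping_scale entry the reported threshold is twice the middle quartile of the grad-norm distribution, up to display rounding (e.g. 2.0 x 1.837e+02 = 3.674e+02 in the first entry of this stretch), so the clip point tracks a running median of recent gradient norms rather than a fixed constant. A sketch of that bookkeeping, assuming a simple sliding window (the actual optim.py statistics may be smoothed differently):

import torch

def clip_by_median(params, recent_norms, clipping_scale=2.0, window=200):
    # Assumed scheme: record the total grad norm of each step, clip at
    # clipping_scale * median of recent norms, and report the quartiles
    # seen in the log.
    norm = torch.norm(torch.stack(
        [p.grad.detach().norm() for p in params if p.grad is not None]))
    recent_norms.append(norm.item())
    del recent_norms[:-window]                # keep the last `window` norms
    q = torch.quantile(torch.tensor(recent_norms),
                       torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
    threshold = clipping_scale * q[2].item()  # 2.0 x median, as logged
    if norm.item() > threshold:
        for p in params:
            if p.grad is not None:
                p.grad.mul_(threshold / norm.item())
    return q.tolist(), threshold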
], batch size: 35, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:01:24,435 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.190e+02 1.637e+02 1.928e+02 2.404e+02 4.442e+02, threshold=3.855e+02, percent-clipped=1.0 +2023-04-27 11:01:26,371 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6609, 2.7318, 2.2135, 2.4909, 2.7789, 2.3716, 3.7094, 1.8375], + device='cuda:0'), covar=tensor([0.3799, 0.2049, 0.4230, 0.3300, 0.1942, 0.2767, 0.1389, 0.4616], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0345, 0.0425, 0.0353, 0.0380, 0.0379, 0.0369, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:01:32,970 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9052, 2.8825, 2.1568, 3.3448, 2.8936, 2.8688, 1.2256, 2.8490], + device='cuda:0'), covar=tensor([0.2401, 0.1542, 0.3419, 0.2631, 0.3127, 0.2311, 0.5776, 0.3220], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0213, 0.0249, 0.0303, 0.0297, 0.0249, 0.0272, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 11:01:34,709 INFO [finetune.py:976] (0/7) Epoch 18, batch 1500, loss[loss=0.1687, simple_loss=0.2486, pruned_loss=0.04435, over 4915.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.25, pruned_loss=0.05352, over 951966.76 frames. ], batch size: 38, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:01:52,829 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98890.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:02:02,798 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98895.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:02:11,049 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=98899.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:02:35,628 INFO [finetune.py:976] (0/7) Epoch 18, batch 1550, loss[loss=0.1608, simple_loss=0.2298, pruned_loss=0.04591, over 4691.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.2508, pruned_loss=0.05415, over 951535.55 frames. ], batch size: 23, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:03:17,434 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98951.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:03:19,154 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.300e+01 1.636e+02 1.920e+02 2.366e+02 5.799e+02, threshold=3.840e+02, percent-clipped=2.0 +2023-04-27 11:03:25,933 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=98956.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:03:41,691 INFO [finetune.py:976] (0/7) Epoch 18, batch 1600, loss[loss=0.1678, simple_loss=0.2441, pruned_loss=0.04578, over 4892.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2471, pruned_loss=0.05263, over 953156.12 frames. 
], batch size: 32, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:03:41,821 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=98971.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:04:04,913 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5476, 1.6298, 1.4360, 1.1125, 1.1733, 1.1408, 1.4145, 1.0879], + device='cuda:0'), covar=tensor([0.1782, 0.1311, 0.1568, 0.1790, 0.2437, 0.2111, 0.1100, 0.2225], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0213, 0.0168, 0.0205, 0.0200, 0.0184, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 11:04:20,540 INFO [finetune.py:976] (0/7) Epoch 18, batch 1650, loss[loss=0.1362, simple_loss=0.2082, pruned_loss=0.03212, over 4812.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2449, pruned_loss=0.0524, over 955452.90 frames. ], batch size: 51, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:04:27,346 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99032.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:04:31,426 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4987, 1.3762, 4.2705, 3.9683, 3.8192, 4.0954, 4.0401, 3.7543], + device='cuda:0'), covar=tensor([0.7182, 0.5784, 0.1235, 0.1861, 0.1052, 0.1675, 0.1404, 0.1464], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0305, 0.0401, 0.0405, 0.0348, 0.0402, 0.0309, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:04:40,834 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99051.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:04:42,972 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.087e+02 1.609e+02 1.883e+02 2.336e+02 5.190e+02, threshold=3.766e+02, percent-clipped=4.0 +2023-04-27 11:04:50,883 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99066.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:04:53,813 INFO [finetune.py:976] (0/7) Epoch 18, batch 1700, loss[loss=0.1412, simple_loss=0.2288, pruned_loss=0.02684, over 4815.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2434, pruned_loss=0.05235, over 956059.75 frames. ], batch size: 51, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:05:21,715 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99112.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:05:22,858 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=99114.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:05:24,187 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0961, 1.7494, 2.2713, 2.6384, 2.1524, 2.0436, 2.1397, 2.1138], + device='cuda:0'), covar=tensor([0.4853, 0.7772, 0.7378, 0.6134, 0.6748, 0.8816, 0.9235, 0.9714], + device='cuda:0'), in_proj_covar=tensor([0.0423, 0.0407, 0.0498, 0.0503, 0.0451, 0.0478, 0.0484, 0.0489], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:05:27,557 INFO [finetune.py:976] (0/7) Epoch 18, batch 1750, loss[loss=0.2023, simple_loss=0.2742, pruned_loss=0.06519, over 4857.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.244, pruned_loss=0.05253, over 956040.51 frames. 
], batch size: 31, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:05:37,021 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-27 11:05:50,007 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.166e+02 1.654e+02 1.955e+02 2.443e+02 4.969e+02, threshold=3.909e+02, percent-clipped=5.0 +2023-04-27 11:05:54,224 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1397, 1.5575, 1.3603, 1.7163, 1.7389, 1.8588, 1.4551, 3.5929], + device='cuda:0'), covar=tensor([0.0589, 0.0774, 0.0782, 0.1181, 0.0597, 0.0485, 0.0738, 0.0108], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 11:06:01,215 INFO [finetune.py:976] (0/7) Epoch 18, batch 1800, loss[loss=0.1776, simple_loss=0.247, pruned_loss=0.05408, over 4888.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2451, pruned_loss=0.05238, over 955976.47 frames. ], batch size: 32, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:06:18,763 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99199.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:06:34,181 INFO [finetune.py:976] (0/7) Epoch 18, batch 1850, loss[loss=0.2185, simple_loss=0.2793, pruned_loss=0.07881, over 4216.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2474, pruned_loss=0.0535, over 954473.25 frames. ], batch size: 65, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:06:35,983 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4363, 2.4589, 2.1461, 2.2179, 2.5439, 2.0968, 3.4932, 1.9332], + device='cuda:0'), covar=tensor([0.3769, 0.2148, 0.4012, 0.3424, 0.1889, 0.2704, 0.1615, 0.4202], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0347, 0.0427, 0.0354, 0.0382, 0.0380, 0.0370, 0.0420], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:06:39,645 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99229.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:06:44,524 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4154, 1.4378, 1.8007, 1.7900, 1.3432, 1.2027, 1.4941, 0.9882], + device='cuda:0'), covar=tensor([0.0581, 0.0687, 0.0384, 0.0584, 0.0802, 0.1106, 0.0596, 0.0593], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0069, 0.0067, 0.0067, 0.0075, 0.0095, 0.0073, 0.0067], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:06:49,924 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99246.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:06:50,532 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=99247.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:06:53,444 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99251.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:06:55,631 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.025e+02 1.735e+02 2.090e+02 2.546e+02 5.570e+02, threshold=4.180e+02, percent-clipped=4.0 +2023-04-27 11:07:06,160 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99268.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:07:07,875 INFO [finetune.py:976] (0/7) 
Epoch 18, batch 1900, loss[loss=0.1692, simple_loss=0.2387, pruned_loss=0.0498, over 4284.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2497, pruned_loss=0.05363, over 956370.95 frames. ], batch size: 65, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:07:20,135 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99290.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:07:41,286 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7577, 3.5348, 0.8307, 1.8276, 1.9336, 2.5757, 2.0658, 1.0128], + device='cuda:0'), covar=tensor([0.1356, 0.1351, 0.2361, 0.1430, 0.1109, 0.1139, 0.1533, 0.2051], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0243, 0.0137, 0.0121, 0.0132, 0.0153, 0.0118, 0.0120], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:08:03,838 INFO [finetune.py:976] (0/7) Epoch 18, batch 1950, loss[loss=0.2176, simple_loss=0.2691, pruned_loss=0.08306, over 4183.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2473, pruned_loss=0.05273, over 957131.80 frames. ], batch size: 65, lr: 3.35e-03, grad_scale: 32.0 +2023-04-27 11:08:05,837 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99324.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:08:07,580 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99327.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:08:09,369 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99329.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:08:30,195 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.408e+01 1.522e+02 1.826e+02 2.209e+02 4.570e+02, threshold=3.652e+02, percent-clipped=1.0 +2023-04-27 11:08:52,739 INFO [finetune.py:976] (0/7) Epoch 18, batch 2000, loss[loss=0.1298, simple_loss=0.2052, pruned_loss=0.02725, over 4745.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2452, pruned_loss=0.05208, over 958386.18 frames. ], batch size: 23, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:09:12,999 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99385.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:09:44,025 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99407.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:09:54,354 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99420.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:09:54,850 INFO [finetune.py:976] (0/7) Epoch 18, batch 2050, loss[loss=0.1981, simple_loss=0.2584, pruned_loss=0.06888, over 4835.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2426, pruned_loss=0.05175, over 957642.21 frames. ], batch size: 49, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:10:00,374 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9472, 1.8473, 2.3200, 2.4425, 1.8266, 1.6280, 1.9757, 1.1287], + device='cuda:0'), covar=tensor([0.0605, 0.0821, 0.0431, 0.0892, 0.0807, 0.1147, 0.0656, 0.0755], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0069, 0.0068, 0.0068, 0.0076, 0.0096, 0.0074, 0.0067], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:10:01,070 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.82 vs. 
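The displayed lr creeps from 3.35e-03 to 3.34e-03 across this stretch (and reaches 3.33e-03 further down), which matches an Eden-style schedule driven by the startup values base_lr=0.004, lr_batches=100000, lr_epochs=100 evaluated near global batch 99k in epoch 18. A sketch of that rule, assuming the standard Eden form (not copied from this repo's optim.py):

def eden_lr(base_lr, batch, epoch,
            lr_batches=100000.0, lr_epochs=100.0):
    # Assumed Eden-style decay: inverse-fourth-root falloff in both the
    # global batch index and the epoch index.
    batch_factor = ((batch**2 + lr_batches**2) / lr_batches**2) ** -0.25
    epoch_factor = ((epoch**2 + lr_epochs**2) / lr_epochs**2) ** -0.25
    return base_lr * batch_factor * epoch_factor

print(f"{eden_lr(0.004, 98300, 18):.2e}")  # 3.35e-03, as logged earlier
print(f"{eden_lr(0.004, 99400, 18):.2e}")  # 3.34e-03, as logged here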
limit=2.0 +2023-04-27 11:10:15,960 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.635e+02 1.952e+02 2.334e+02 5.427e+02, threshold=3.904e+02, percent-clipped=2.0 +2023-04-27 11:10:26,016 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99468.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 11:10:28,724 INFO [finetune.py:976] (0/7) Epoch 18, batch 2100, loss[loss=0.2085, simple_loss=0.282, pruned_loss=0.06745, over 4865.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2424, pruned_loss=0.05169, over 956197.52 frames. ], batch size: 31, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:10:30,825 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-27 11:10:35,445 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99481.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:11:02,725 INFO [finetune.py:976] (0/7) Epoch 18, batch 2150, loss[loss=0.2019, simple_loss=0.2698, pruned_loss=0.06697, over 4846.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2461, pruned_loss=0.05247, over 956331.95 frames. ], batch size: 49, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:11:07,696 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-27 11:11:08,223 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99529.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 11:11:18,915 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99546.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:11:20,154 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1751, 1.3957, 1.3419, 1.7122, 1.5005, 1.6284, 1.3565, 3.1011], + device='cuda:0'), covar=tensor([0.0699, 0.1042, 0.1021, 0.1318, 0.0861, 0.0672, 0.1018, 0.0239], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 11:11:21,985 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99551.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:11:23,739 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.609e+02 1.810e+02 2.358e+02 5.458e+02, threshold=3.621e+02, percent-clipped=4.0 +2023-04-27 11:11:32,846 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99568.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:11:35,034 INFO [finetune.py:976] (0/7) Epoch 18, batch 2200, loss[loss=0.1413, simple_loss=0.2186, pruned_loss=0.03196, over 4769.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2489, pruned_loss=0.05334, over 957236.39 frames. ], batch size: 26, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:11:45,025 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99585.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:11:47,529 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99589.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:11:48,808 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-27 11:11:51,022 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=99594.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:11:54,025 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=99599.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:11:54,124 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4670, 1.8095, 1.9009, 2.0205, 1.8511, 1.9505, 1.9789, 1.9231], + device='cuda:0'), covar=tensor([0.3946, 0.5282, 0.4621, 0.4511, 0.5481, 0.7028, 0.5073, 0.4929], + device='cuda:0'), in_proj_covar=tensor([0.0330, 0.0370, 0.0319, 0.0331, 0.0342, 0.0392, 0.0353, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 11:12:08,624 INFO [finetune.py:976] (0/7) Epoch 18, batch 2250, loss[loss=0.1688, simple_loss=0.2373, pruned_loss=0.05011, over 4747.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2494, pruned_loss=0.05302, over 957088.96 frames. ], batch size: 27, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:12:10,997 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99624.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:12:13,833 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99627.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:12:15,116 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99629.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:12:16,846 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3602, 3.2902, 2.4930, 3.8472, 3.2685, 3.3451, 1.3462, 3.3060], + device='cuda:0'), covar=tensor([0.1744, 0.1315, 0.3345, 0.2371, 0.2714, 0.2099, 0.5795, 0.2856], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0216, 0.0252, 0.0308, 0.0302, 0.0252, 0.0275, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 11:12:28,861 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99650.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:12:31,119 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.508e+01 1.589e+02 1.867e+02 2.262e+02 4.430e+02, threshold=3.734e+02, percent-clipped=1.0 +2023-04-27 11:12:41,913 INFO [finetune.py:976] (0/7) Epoch 18, batch 2300, loss[loss=0.1387, simple_loss=0.2215, pruned_loss=0.02794, over 4769.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2488, pruned_loss=0.05258, over 957691.88 frames. ], batch size: 27, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:12:44,904 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=99675.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:12:49,364 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99680.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:12:49,393 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=99680.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:13:12,561 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99707.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:13:26,444 INFO [finetune.py:976] (0/7) Epoch 18, batch 2350, loss[loss=0.1694, simple_loss=0.2361, pruned_loss=0.05141, over 4762.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2468, pruned_loss=0.05211, over 958020.53 frames. 
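The Whitening lines (e.g. num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 just above) report an isotropy measure of intermediate activations: it equals 1.0 when the per-group feature covariance is a multiple of the identity and grows as the covariance becomes lopsided, and a corrective gradient engages only when the metric exceeds the limit (as when metric=5.13 vs. limit=5.0 further down). A sketch of one plausible definition of that metric (assumed, not copied from scaling.py):

import torch

def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (num_frames, num_channels). Per group with covariance C,
    # d * trace(C @ C) / trace(C)**2 is 1.0 for C = lambda * I ("white")
    # and larger otherwise; assume the log prints the mean over groups.
    num_frames, num_channels = x.shape
    d = num_channels // num_groups
    xg = x.reshape(num_frames, num_groups, d).transpose(0, 1)
    covar = torch.matmul(xg.transpose(1, 2), xg)      # (groups, d, d)
    trace_c = covar.diagonal(dim1=1, dim2=2).sum(-1)
    trace_c2 = (covar * covar).sum(dim=(1, 2))        # trace(C@C), C sym.
    return (d * trace_c2 / trace_c**2).mean()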
], batch size: 54, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:13:56,962 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=99741.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:14:16,633 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.069e+02 1.602e+02 1.875e+02 2.250e+02 5.277e+02, threshold=3.750e+02, percent-clipped=1.0 +2023-04-27 11:14:17,850 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=99755.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:14:33,758 INFO [finetune.py:976] (0/7) Epoch 18, batch 2400, loss[loss=0.1538, simple_loss=0.2262, pruned_loss=0.04069, over 4906.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2441, pruned_loss=0.05172, over 960686.77 frames. ], batch size: 35, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:14:36,879 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99776.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:14:44,864 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9719, 2.3127, 1.9492, 1.7292, 1.4401, 1.4591, 1.9068, 1.4024], + device='cuda:0'), covar=tensor([0.1725, 0.1511, 0.1462, 0.1816, 0.2289, 0.2006, 0.1108, 0.2097], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0213, 0.0169, 0.0205, 0.0201, 0.0185, 0.0157, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 11:14:56,264 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.1367, 4.0810, 2.8498, 4.7305, 4.1823, 4.1785, 1.6996, 4.0216], + device='cuda:0'), covar=tensor([0.1499, 0.1094, 0.2828, 0.1342, 0.3004, 0.1501, 0.6307, 0.2486], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0217, 0.0253, 0.0309, 0.0301, 0.0252, 0.0276, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 11:15:35,836 INFO [finetune.py:976] (0/7) Epoch 18, batch 2450, loss[loss=0.1617, simple_loss=0.2449, pruned_loss=0.03925, over 4905.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2416, pruned_loss=0.05095, over 959185.26 frames. ], batch size: 36, lr: 3.34e-03, grad_scale: 64.0 +2023-04-27 11:15:35,922 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4516, 1.4334, 4.0215, 3.7567, 3.5599, 3.8019, 3.7287, 3.5795], + device='cuda:0'), covar=tensor([0.7239, 0.5665, 0.1082, 0.1862, 0.1237, 0.2053, 0.1838, 0.1427], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0311, 0.0406, 0.0410, 0.0352, 0.0407, 0.0313, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:15:37,733 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99824.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 11:16:08,394 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.453e+01 1.597e+02 1.870e+02 2.238e+02 4.750e+02, threshold=3.741e+02, percent-clipped=1.0 +2023-04-27 11:16:19,143 INFO [finetune.py:976] (0/7) Epoch 18, batch 2500, loss[loss=0.1887, simple_loss=0.2711, pruned_loss=0.05309, over 4903.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2445, pruned_loss=0.05187, over 960314.17 frames. 
], batch size: 37, lr: 3.34e-03, grad_scale: 64.0 +2023-04-27 11:16:28,690 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99885.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:16:45,255 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8800, 1.8525, 1.8462, 1.5163, 2.0402, 1.7144, 2.5992, 1.5791], + device='cuda:0'), covar=tensor([0.3725, 0.1948, 0.4484, 0.2887, 0.1536, 0.2403, 0.1272, 0.4933], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0345, 0.0424, 0.0351, 0.0380, 0.0377, 0.0368, 0.0417], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:16:50,056 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.13 vs. limit=5.0 +2023-04-27 11:16:52,953 INFO [finetune.py:976] (0/7) Epoch 18, batch 2550, loss[loss=0.2011, simple_loss=0.2661, pruned_loss=0.06807, over 4870.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2479, pruned_loss=0.05291, over 957770.69 frames. ], batch size: 31, lr: 3.34e-03, grad_scale: 64.0 +2023-04-27 11:16:54,875 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99924.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:16:54,892 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99924.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:17:00,810 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=99933.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:17:10,139 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=99945.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:17:16,096 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.590e+02 1.888e+02 2.384e+02 3.870e+02, threshold=3.776e+02, percent-clipped=2.0 +2023-04-27 11:17:26,891 INFO [finetune.py:976] (0/7) Epoch 18, batch 2600, loss[loss=0.2199, simple_loss=0.2974, pruned_loss=0.0712, over 4907.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2504, pruned_loss=0.05361, over 957726.07 frames. ], batch size: 38, lr: 3.34e-03, grad_scale: 64.0 +2023-04-27 11:17:27,553 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=99972.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:17:32,439 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=99980.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:17:47,057 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-100000.pt +2023-04-27 11:17:49,722 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-27 11:17:52,681 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100007.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 11:18:01,562 INFO [finetune.py:976] (0/7) Epoch 18, batch 2650, loss[loss=0.195, simple_loss=0.2679, pruned_loss=0.06107, over 4892.00 frames. ], tot_loss[loss=0.1797, simple_loss=0.2514, pruned_loss=0.05402, over 958191.26 frames. 
], batch size: 37, lr: 3.34e-03, grad_scale: 64.0 +2023-04-27 11:18:05,839 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=100028.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:18:06,477 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2185, 2.4762, 0.8598, 1.5625, 1.5292, 1.8891, 1.6184, 0.9083], + device='cuda:0'), covar=tensor([0.1378, 0.1064, 0.1703, 0.1208, 0.1121, 0.0870, 0.1427, 0.1648], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0242, 0.0136, 0.0120, 0.0132, 0.0152, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:18:10,664 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100036.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:18:24,399 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.122e+02 1.666e+02 1.842e+02 2.203e+02 3.197e+02, threshold=3.685e+02, percent-clipped=0.0 +2023-04-27 11:18:29,413 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7098, 1.6097, 1.9219, 2.0261, 1.6538, 1.3471, 1.6833, 1.0502], + device='cuda:0'), covar=tensor([0.0759, 0.0646, 0.0559, 0.0825, 0.0778, 0.0977, 0.0670, 0.0701], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0069, 0.0068, 0.0067, 0.0075, 0.0096, 0.0074, 0.0067], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:18:33,088 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100068.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 11:18:33,642 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9013, 1.4065, 5.1115, 4.7963, 4.4667, 4.9284, 4.5519, 4.5369], + device='cuda:0'), covar=tensor([0.7039, 0.5671, 0.1086, 0.1822, 0.0995, 0.1185, 0.1220, 0.1616], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0308, 0.0404, 0.0409, 0.0350, 0.0406, 0.0312, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:18:35,203 INFO [finetune.py:976] (0/7) Epoch 18, batch 2700, loss[loss=0.1813, simple_loss=0.2553, pruned_loss=0.0537, over 4844.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2491, pruned_loss=0.05276, over 955841.59 frames. 
], batch size: 49, lr: 3.34e-03, grad_scale: 64.0 +2023-04-27 11:18:38,389 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100076.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:18:56,008 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8554, 2.1265, 2.0000, 2.2228, 1.9979, 2.0683, 2.0802, 2.0046], + device='cuda:0'), covar=tensor([0.4166, 0.6197, 0.5445, 0.4804, 0.6083, 0.7523, 0.6487, 0.5983], + device='cuda:0'), in_proj_covar=tensor([0.0331, 0.0371, 0.0318, 0.0332, 0.0342, 0.0392, 0.0353, 0.0324], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 11:19:06,243 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6462, 2.0220, 1.7652, 1.9651, 1.5056, 1.7279, 1.7398, 1.3384], + device='cuda:0'), covar=tensor([0.1594, 0.1038, 0.0672, 0.0855, 0.2729, 0.0879, 0.1447, 0.2043], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0304, 0.0218, 0.0279, 0.0311, 0.0259, 0.0249, 0.0265], + device='cuda:0'), out_proj_covar=tensor([1.1473e-04, 1.2099e-04, 8.6462e-05, 1.1094e-04, 1.2632e-04, 1.0273e-04, + 1.0058e-04, 1.0519e-04], device='cuda:0') +2023-04-27 11:19:37,243 INFO [finetune.py:976] (0/7) Epoch 18, batch 2750, loss[loss=0.1451, simple_loss=0.211, pruned_loss=0.03961, over 4842.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2462, pruned_loss=0.05201, over 954058.94 frames. ], batch size: 49, lr: 3.34e-03, grad_scale: 64.0 +2023-04-27 11:19:39,079 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=100124.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:19:39,115 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100124.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 11:20:11,926 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.977e+01 1.603e+02 1.951e+02 2.448e+02 3.827e+02, threshold=3.902e+02, percent-clipped=2.0 +2023-04-27 11:20:25,188 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3490, 1.2456, 1.3573, 1.6028, 1.5938, 1.2715, 0.9296, 1.4864], + device='cuda:0'), covar=tensor([0.0714, 0.1273, 0.0754, 0.0501, 0.0599, 0.0802, 0.0875, 0.0558], + device='cuda:0'), in_proj_covar=tensor([0.0190, 0.0203, 0.0184, 0.0173, 0.0180, 0.0183, 0.0153, 0.0181], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:20:28,162 INFO [finetune.py:976] (0/7) Epoch 18, batch 2800, loss[loss=0.1787, simple_loss=0.2414, pruned_loss=0.05794, over 4936.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2427, pruned_loss=0.05048, over 956099.04 frames. 
], batch size: 33, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:20:34,230 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=100172.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 11:20:36,019 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1963, 3.0659, 1.0958, 1.7270, 1.6861, 2.2320, 1.8622, 1.1648], + device='cuda:0'), covar=tensor([0.1696, 0.1488, 0.2035, 0.1474, 0.1285, 0.1223, 0.1554, 0.2098], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0241, 0.0136, 0.0120, 0.0132, 0.0152, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:21:07,291 INFO [finetune.py:976] (0/7) Epoch 18, batch 2850, loss[loss=0.1491, simple_loss=0.2216, pruned_loss=0.03835, over 4769.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2415, pruned_loss=0.0507, over 954909.59 frames. ], batch size: 26, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:21:09,242 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100224.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:21:29,455 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100238.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:21:39,962 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100245.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:21:52,185 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.601e+02 1.939e+02 2.362e+02 3.802e+02, threshold=3.878e+02, percent-clipped=0.0 +2023-04-27 11:22:14,793 INFO [finetune.py:976] (0/7) Epoch 18, batch 2900, loss[loss=0.1865, simple_loss=0.2617, pruned_loss=0.05568, over 4833.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2436, pruned_loss=0.05095, over 956240.54 frames. ], batch size: 30, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:22:15,462 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=100272.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:22:24,007 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7257, 4.1393, 0.6897, 2.2224, 2.1461, 2.6147, 2.5441, 1.0248], + device='cuda:0'), covar=tensor([0.1519, 0.0876, 0.2342, 0.1243, 0.1210, 0.1117, 0.1356, 0.2199], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0241, 0.0136, 0.0120, 0.0131, 0.0151, 0.0116, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:22:28,727 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=100293.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:22:32,420 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100299.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:22:48,676 INFO [finetune.py:976] (0/7) Epoch 18, batch 2950, loss[loss=0.2195, simple_loss=0.2928, pruned_loss=0.07304, over 4804.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2463, pruned_loss=0.05192, over 955039.34 frames. 
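use_fp16=True in the startup config, and the grad_scale field follows dynamic loss scaling: it doubled from 32.0 to 64.0 around batch 2450 and is back at 32.0 by batch 2800 above, the usual signature of a periodic growth step followed by an inf/nan batch that made the scaler back off. A minimal sketch of that loop, assuming standard torch.cuda.amp.GradScaler semantics (model, batch, optimizer and loss_fn are placeholder names):

import torch

scaler = torch.cuda.amp.GradScaler(init_scale=32.0, growth_factor=2.0,
                                   backoff_factor=0.5)

def fp16_step(model, batch, optimizer, loss_fn):
    optimizer.zero_grad()
    with torch.cuda.amp.autocast():
        loss = loss_fn(model, batch)
    scaler.scale(loss).backward()
    scaler.step(optimizer)   # skipped when grads contain inf/nan
    scaler.update()          # grows the scale periodically, halves on overflow
    return loss.detach(), scaler.get_scale()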
], batch size: 41, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:22:57,896 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100336.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:23:02,048 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8104, 3.7484, 2.7090, 4.4295, 3.8665, 3.8149, 1.7655, 3.7450], + device='cuda:0'), covar=tensor([0.1585, 0.1213, 0.3096, 0.1673, 0.2864, 0.1648, 0.5452, 0.2396], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0213, 0.0248, 0.0304, 0.0296, 0.0247, 0.0271, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 11:23:09,851 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.535e+02 1.848e+02 2.418e+02 4.913e+02, threshold=3.697e+02, percent-clipped=1.0 +2023-04-27 11:23:15,824 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100363.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 11:23:22,041 INFO [finetune.py:976] (0/7) Epoch 18, batch 3000, loss[loss=0.1649, simple_loss=0.2345, pruned_loss=0.04769, over 4845.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2484, pruned_loss=0.05298, over 954969.62 frames. ], batch size: 30, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:23:22,042 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 11:23:32,628 INFO [finetune.py:1010] (0/7) Epoch 18, validation: loss=0.1524, simple_loss=0.2231, pruned_loss=0.04086, over 2265189.00 frames. +2023-04-27 11:23:32,629 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 11:23:41,185 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=100384.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:23:52,498 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100402.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:24:04,563 INFO [finetune.py:976] (0/7) Epoch 18, batch 3050, loss[loss=0.1902, simple_loss=0.2613, pruned_loss=0.0596, over 4840.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2481, pruned_loss=0.05247, over 953435.86 frames. ], batch size: 49, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:24:43,545 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.094e+02 1.525e+02 1.858e+02 2.098e+02 3.467e+02, threshold=3.716e+02, percent-clipped=0.0 +2023-04-27 11:24:55,000 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100463.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 11:24:59,804 INFO [finetune.py:976] (0/7) Epoch 18, batch 3100, loss[loss=0.1558, simple_loss=0.2258, pruned_loss=0.0429, over 4760.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2471, pruned_loss=0.0523, over 954367.85 frames. ], batch size: 26, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:25:47,686 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7639, 4.2062, 0.8052, 2.2998, 2.5099, 2.6088, 2.5158, 0.9660], + device='cuda:0'), covar=tensor([0.1362, 0.0858, 0.2146, 0.1141, 0.0920, 0.1124, 0.1413, 0.2070], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0242, 0.0136, 0.0120, 0.0132, 0.0152, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:26:02,765 INFO [finetune.py:976] (0/7) Epoch 18, batch 3150, loss[loss=0.2323, simple_loss=0.292, pruned_loss=0.0863, over 4798.00 frames. 
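The housekeeping cadence here matches the startup config: checkpoint-100000.pt was written just as batch_count crossed a multiple of save_every_n=2000 (checkpoint-102000.pt follows at the end of this stretch), and the validation pass at Epoch 18, batch 3000 above lines up with valid_interval=3000. The validation losses also obey the same weighting checked earlier, as this self-contained check shows:

def is_save_step(batch_idx_train, save_every_n=2000):
    # Periodic checkpointing, matching save_every_n in the config dump.
    return batch_idx_train % save_every_n == 0

assert is_save_step(100000) and is_save_step(102000)
print(f"{0.5 * 0.2231 + 0.04086:.4f}")  # 0.1524 == logged validation loss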
], tot_loss[loss=0.1741, simple_loss=0.2448, pruned_loss=0.05173, over 954535.24 frames. ], batch size: 45, lr: 3.34e-03, grad_scale: 32.0 +2023-04-27 11:26:37,705 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.520e+02 1.805e+02 2.268e+02 8.838e+02, threshold=3.610e+02, percent-clipped=4.0 +2023-04-27 11:26:38,614 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-27 11:26:59,499 INFO [finetune.py:976] (0/7) Epoch 18, batch 3200, loss[loss=0.1549, simple_loss=0.2221, pruned_loss=0.04382, over 4757.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2419, pruned_loss=0.05084, over 955648.54 frames. ], batch size: 28, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:27:33,044 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100594.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:28:06,114 INFO [finetune.py:976] (0/7) Epoch 18, batch 3250, loss[loss=0.2087, simple_loss=0.2742, pruned_loss=0.07165, over 4810.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2433, pruned_loss=0.05184, over 954841.71 frames. ], batch size: 45, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:28:07,476 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3387, 1.5966, 1.4319, 1.8512, 1.6666, 2.0177, 1.5220, 3.5662], + device='cuda:0'), covar=tensor([0.0579, 0.0820, 0.0795, 0.1138, 0.0633, 0.0446, 0.0728, 0.0122], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 11:28:27,934 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4944, 1.4494, 1.8011, 1.8257, 1.3585, 1.2036, 1.4911, 1.0089], + device='cuda:0'), covar=tensor([0.0517, 0.0728, 0.0368, 0.0528, 0.0785, 0.1214, 0.0601, 0.0634], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0068, 0.0067, 0.0067, 0.0074, 0.0094, 0.0073, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:28:30,300 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.424e+01 1.529e+02 1.873e+02 2.219e+02 4.824e+02, threshold=3.746e+02, percent-clipped=4.0 +2023-04-27 11:28:34,096 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7947, 3.6829, 2.7369, 4.3932, 3.7128, 3.7638, 1.8335, 3.7756], + device='cuda:0'), covar=tensor([0.1605, 0.1182, 0.3605, 0.1255, 0.2534, 0.1562, 0.5043, 0.2313], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0214, 0.0249, 0.0303, 0.0296, 0.0247, 0.0270, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 11:28:35,350 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100663.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 11:28:40,637 INFO [finetune.py:976] (0/7) Epoch 18, batch 3300, loss[loss=0.1701, simple_loss=0.2416, pruned_loss=0.04935, over 4901.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2478, pruned_loss=0.05276, over 955635.56 frames. 
], batch size: 32, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:29:07,771 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=100711.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 11:29:13,775 INFO [finetune.py:976] (0/7) Epoch 18, batch 3350, loss[loss=0.1359, simple_loss=0.2015, pruned_loss=0.03514, over 4062.00 frames. ], tot_loss[loss=0.1777, simple_loss=0.2489, pruned_loss=0.05326, over 954390.38 frames. ], batch size: 17, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:29:19,913 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=100730.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 11:29:23,553 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7409, 1.6365, 1.9554, 2.0389, 1.6735, 1.3790, 1.6810, 1.0342], + device='cuda:0'), covar=tensor([0.0566, 0.0571, 0.0498, 0.0683, 0.0663, 0.1067, 0.0668, 0.0786], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0068, 0.0067, 0.0067, 0.0074, 0.0095, 0.0073, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:29:37,231 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.219e+01 1.703e+02 2.119e+02 2.646e+02 1.102e+03, threshold=4.237e+02, percent-clipped=5.0 +2023-04-27 11:29:39,114 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=100758.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 11:29:44,058 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8026, 1.0597, 3.2551, 3.0165, 2.9183, 3.1519, 3.1065, 2.8836], + device='cuda:0'), covar=tensor([0.8030, 0.5757, 0.1693, 0.2468, 0.1456, 0.1887, 0.2046, 0.1770], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0305, 0.0403, 0.0404, 0.0348, 0.0401, 0.0311, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:29:47,008 INFO [finetune.py:976] (0/7) Epoch 18, batch 3400, loss[loss=0.206, simple_loss=0.2745, pruned_loss=0.06873, over 4896.00 frames. ], tot_loss[loss=0.1795, simple_loss=0.251, pruned_loss=0.05405, over 957267.67 frames. ], batch size: 36, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:30:00,853 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=100791.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 11:30:20,272 INFO [finetune.py:976] (0/7) Epoch 18, batch 3450, loss[loss=0.1999, simple_loss=0.268, pruned_loss=0.0659, over 4901.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2503, pruned_loss=0.05356, over 958277.72 frames. ], batch size: 37, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:30:54,164 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.615e+02 1.948e+02 2.378e+02 4.172e+02, threshold=3.896e+02, percent-clipped=0.0 +2023-04-27 11:31:09,717 INFO [finetune.py:976] (0/7) Epoch 18, batch 3500, loss[loss=0.1474, simple_loss=0.2295, pruned_loss=0.03268, over 4913.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2481, pruned_loss=0.0528, over 956344.80 frames. 
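The recurring attn_weights_entropy dumps are periodic diagnostics of the self-attention distributions, printed alongside covariance statistics of the attention projections: higher values mean flatter attention, values near zero mean nearly one-hot attention. A sketch of the underlying quantity, assuming plain Shannon entropy of the attention rows (the exact reduction over heads and frames in zipformer.py is not shown in the log):

import torch

def attn_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    # attn_weights: (..., num_queries, num_keys), rows summing to 1.
    # Shannon entropy in nats per query, averaged over queries.
    p = attn_weights.clamp(min=1e-20)
    return -(p * p.log()).sum(dim=-1).mean(dim=-1)

p = torch.softmax(torch.randn(4, 10, 10), dim=-1)
print(attn_entropy(p))  # each entry <= log(10) ~ 2.30, the uniform maximum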
], batch size: 43, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:31:25,319 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0445, 1.0246, 1.2250, 1.1604, 0.9725, 0.9358, 0.9810, 0.4908], + device='cuda:0'), covar=tensor([0.0586, 0.0682, 0.0495, 0.0615, 0.0838, 0.1234, 0.0567, 0.0766], + device='cuda:0'), in_proj_covar=tensor([0.0067, 0.0068, 0.0067, 0.0067, 0.0074, 0.0095, 0.0073, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:31:30,609 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=100894.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:31:49,439 INFO [finetune.py:976] (0/7) Epoch 18, batch 3550, loss[loss=0.1612, simple_loss=0.2348, pruned_loss=0.0438, over 4728.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2456, pruned_loss=0.05205, over 957542.49 frames. ], batch size: 23, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:32:02,715 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=100942.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:32:23,486 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.524e+02 1.758e+02 2.158e+02 5.191e+02, threshold=3.516e+02, percent-clipped=1.0 +2023-04-27 11:32:39,053 INFO [finetune.py:976] (0/7) Epoch 18, batch 3600, loss[loss=0.1716, simple_loss=0.2423, pruned_loss=0.05049, over 4753.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2434, pruned_loss=0.05139, over 956651.38 frames. ], batch size: 54, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:33:40,718 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6235, 2.1389, 1.5939, 1.4129, 1.2096, 1.2056, 1.5565, 1.1659], + device='cuda:0'), covar=tensor([0.1598, 0.1221, 0.1426, 0.1702, 0.2236, 0.1853, 0.1029, 0.2025], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0213, 0.0169, 0.0206, 0.0201, 0.0185, 0.0156, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 11:33:50,171 INFO [finetune.py:976] (0/7) Epoch 18, batch 3650, loss[loss=0.2095, simple_loss=0.2957, pruned_loss=0.06163, over 4834.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2454, pruned_loss=0.05228, over 955290.26 frames. ], batch size: 47, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:34:05,760 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7286, 2.2670, 1.8197, 1.6317, 1.3122, 1.3227, 1.8439, 1.2224], + device='cuda:0'), covar=tensor([0.1718, 0.1296, 0.1402, 0.1741, 0.2384, 0.2000, 0.1057, 0.2120], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0213, 0.0169, 0.0206, 0.0200, 0.0185, 0.0156, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 11:34:31,992 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.646e+02 1.981e+02 2.447e+02 4.488e+02, threshold=3.961e+02, percent-clipped=2.0 +2023-04-27 11:34:34,866 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101058.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:34:48,424 INFO [finetune.py:976] (0/7) Epoch 18, batch 3700, loss[loss=0.1777, simple_loss=0.2542, pruned_loss=0.05058, over 4905.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2481, pruned_loss=0.05276, over 955432.20 frames. 
], batch size: 37, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:34:57,617 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101086.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 11:35:07,949 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4103, 1.9227, 2.2883, 2.7893, 2.2569, 1.8275, 1.5529, 2.1488], + device='cuda:0'), covar=tensor([0.3284, 0.3222, 0.1741, 0.2558, 0.2540, 0.2620, 0.3952, 0.2047], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0244, 0.0224, 0.0311, 0.0216, 0.0229, 0.0226, 0.0183], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 11:35:10,796 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=101106.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:35:11,929 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101107.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:35:22,251 INFO [finetune.py:976] (0/7) Epoch 18, batch 3750, loss[loss=0.1462, simple_loss=0.2197, pruned_loss=0.03636, over 4762.00 frames. ], tot_loss[loss=0.1781, simple_loss=0.2499, pruned_loss=0.05317, over 955549.89 frames. ], batch size: 27, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:35:43,442 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.572e+02 1.895e+02 2.251e+02 5.219e+02, threshold=3.790e+02, percent-clipped=2.0 +2023-04-27 11:35:54,456 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101168.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:35:56,191 INFO [finetune.py:976] (0/7) Epoch 18, batch 3800, loss[loss=0.1821, simple_loss=0.2589, pruned_loss=0.0526, over 4781.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2507, pruned_loss=0.05365, over 955423.40 frames. ], batch size: 25, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:36:13,371 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7364, 2.4126, 2.6953, 3.3560, 3.1052, 2.7900, 2.1971, 2.8791], + device='cuda:0'), covar=tensor([0.0808, 0.0964, 0.0609, 0.0490, 0.0559, 0.0755, 0.0677, 0.0496], + device='cuda:0'), in_proj_covar=tensor([0.0189, 0.0202, 0.0183, 0.0172, 0.0178, 0.0181, 0.0151, 0.0179], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:36:30,056 INFO [finetune.py:976] (0/7) Epoch 18, batch 3850, loss[loss=0.1754, simple_loss=0.2475, pruned_loss=0.05162, over 4832.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2491, pruned_loss=0.05264, over 956387.89 frames. ], batch size: 30, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:36:48,442 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-27 11:36:50,614 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.513e+02 1.818e+02 2.217e+02 6.339e+02, threshold=3.636e+02, percent-clipped=4.0 +2023-04-27 11:37:02,693 INFO [finetune.py:976] (0/7) Epoch 18, batch 3900, loss[loss=0.1618, simple_loss=0.2288, pruned_loss=0.04733, over 4825.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.246, pruned_loss=0.05234, over 954079.76 frames. ], batch size: 33, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:37:09,334 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. 
limit=2.0 +2023-04-27 11:37:35,479 INFO [finetune.py:976] (0/7) Epoch 18, batch 3950, loss[loss=0.1814, simple_loss=0.2462, pruned_loss=0.05829, over 4822.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.243, pruned_loss=0.05143, over 955650.69 frames. ], batch size: 41, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:38:08,007 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1004, 1.6041, 1.9438, 2.2056, 1.9021, 1.5454, 1.0127, 1.6606], + device='cuda:0'), covar=tensor([0.3352, 0.3044, 0.1715, 0.2220, 0.2515, 0.2640, 0.4312, 0.2044], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0245, 0.0225, 0.0312, 0.0217, 0.0230, 0.0227, 0.0183], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 11:38:09,083 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.783e+01 1.521e+02 1.787e+02 2.150e+02 4.001e+02, threshold=3.574e+02, percent-clipped=1.0 +2023-04-27 11:38:30,022 INFO [finetune.py:976] (0/7) Epoch 18, batch 4000, loss[loss=0.1943, simple_loss=0.2609, pruned_loss=0.06383, over 4777.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2417, pruned_loss=0.05059, over 955718.87 frames. ], batch size: 28, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:38:52,341 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101386.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 11:39:35,325 INFO [finetune.py:976] (0/7) Epoch 18, batch 4050, loss[loss=0.2256, simple_loss=0.2797, pruned_loss=0.08579, over 4929.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2465, pruned_loss=0.05245, over 956176.64 frames. ], batch size: 33, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:39:55,113 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=101434.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 11:40:07,832 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.179e+02 1.675e+02 1.988e+02 2.422e+02 4.320e+02, threshold=3.976e+02, percent-clipped=3.0 +2023-04-27 11:40:12,820 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=101463.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:40:18,124 INFO [finetune.py:976] (0/7) Epoch 18, batch 4100, loss[loss=0.1431, simple_loss=0.2069, pruned_loss=0.03969, over 4381.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2496, pruned_loss=0.05374, over 955900.10 frames. ], batch size: 19, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:40:51,379 INFO [finetune.py:976] (0/7) Epoch 18, batch 4150, loss[loss=0.1971, simple_loss=0.2761, pruned_loss=0.05899, over 4845.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2501, pruned_loss=0.05364, over 954208.91 frames. ], batch size: 49, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:41:14,403 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.437e+01 1.627e+02 2.025e+02 2.401e+02 3.721e+02, threshold=4.051e+02, percent-clipped=0.0 +2023-04-27 11:41:24,217 INFO [finetune.py:976] (0/7) Epoch 18, batch 4200, loss[loss=0.181, simple_loss=0.2536, pruned_loss=0.05423, over 4909.00 frames. ], tot_loss[loss=0.1774, simple_loss=0.2496, pruned_loss=0.05262, over 955798.16 frames. ], batch size: 46, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:41:58,014 INFO [finetune.py:976] (0/7) Epoch 18, batch 4250, loss[loss=0.195, simple_loss=0.2576, pruned_loss=0.06616, over 4901.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2469, pruned_loss=0.05187, over 956485.49 frames. 
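The recurring zipformer.py:1188 lines track stochastic layer skipping: each of the five staggered warmup windows (666.7-1333.3 through 3333.3-4000.0) appears repeatedly, one per encoder stack, and on each batch a draw decides how many of that stack's layers to bypass. This deep into training (batch_count ~ 1e5, long past every warmup_end) the draw is usually num_to_drop=0 with an occasional 1, consistent with a small residual drop probability that persists after warmup. A toy sketch of such a draw, with assumed probabilities (the real schedule in zipformer.py is not shown in the log):

import random

def pick_layers_to_drop(num_layers, batch_count, warmup_begin, warmup_end,
                        warm_prob=0.5, residual_prob=0.05, rng=random):
    # Assumed behaviour: drop layers aggressively inside the warmup
    # window, then keep a small residual probability afterwards.
    prob = warm_prob if batch_count < warmup_end else residual_prob
    drop = {i for i in range(num_layers) if rng.random() < prob}
    return len(drop), drop

# Values from the entry above; with residual_prob=0.05 this is
# usually (0, set()), occasionally (1, {...}), as seen in the log.
print(pick_layers_to_drop(4, 101386.0, 2000.0, 2666.7))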
], batch size: 35, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:42:01,862 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-27 11:42:21,983 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.254e+01 1.472e+02 1.774e+02 2.178e+02 3.033e+02, threshold=3.548e+02, percent-clipped=0.0 +2023-04-27 11:42:31,623 INFO [finetune.py:976] (0/7) Epoch 18, batch 4300, loss[loss=0.158, simple_loss=0.2265, pruned_loss=0.04473, over 4815.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2425, pruned_loss=0.0506, over 955362.50 frames. ], batch size: 45, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:42:40,102 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101684.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:43:04,507 INFO [finetune.py:976] (0/7) Epoch 18, batch 4350, loss[loss=0.1839, simple_loss=0.2419, pruned_loss=0.06291, over 4823.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2397, pruned_loss=0.05023, over 957046.30 frames. ], batch size: 30, lr: 3.33e-03, grad_scale: 32.0 +2023-04-27 11:43:20,703 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101745.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:43:32,959 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.731e+01 1.464e+02 1.839e+02 2.155e+02 7.335e+02, threshold=3.679e+02, percent-clipped=2.0 +2023-04-27 11:43:43,334 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=101763.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:43:48,150 INFO [finetune.py:976] (0/7) Epoch 18, batch 4400, loss[loss=0.1297, simple_loss=0.1997, pruned_loss=0.02983, over 4719.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2411, pruned_loss=0.05113, over 954770.17 frames. ], batch size: 23, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 11:43:56,209 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-27 11:44:37,290 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=101811.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:44:48,692 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3457, 1.7881, 2.2254, 2.7968, 2.1957, 1.7480, 1.6371, 2.0868], + device='cuda:0'), covar=tensor([0.3413, 0.3399, 0.1709, 0.2100, 0.2695, 0.2769, 0.3979, 0.2033], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0247, 0.0227, 0.0315, 0.0219, 0.0231, 0.0228, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 11:44:49,150 INFO [finetune.py:976] (0/7) Epoch 18, batch 4450, loss[loss=0.149, simple_loss=0.2259, pruned_loss=0.03612, over 4869.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2444, pruned_loss=0.05213, over 952781.34 frames. ], batch size: 31, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 11:45:32,217 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.703e+02 1.979e+02 2.478e+02 5.839e+02, threshold=3.957e+02, percent-clipped=3.0 +2023-04-27 11:45:42,460 INFO [finetune.py:976] (0/7) Epoch 18, batch 4500, loss[loss=0.1853, simple_loss=0.2552, pruned_loss=0.05763, over 4836.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2459, pruned_loss=0.0522, over 953138.97 frames. 
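
The three logged loss components are related throughout by loss = 0.5 * simple_loss + pruned_loss; for the batch-4400 running totals above, 0.5 * 0.2411 + 0.05113 = 0.1717. A sketch of that combination follows; only those steady-state weights are confirmed by this log, and the warmup ramp inside the function is an assumption about behaviour from long before this excerpt.

def total_transducer_loss(simple_loss: float, pruned_loss: float,
                          batch_idx_train: int,
                          simple_loss_scale: float = 0.5,
                          warm_step: int = 2000) -> float:
    # Long past warm_step (batch_idx_train is ~1e5 here) the weights are
    # constant: loss = 0.5 * simple_loss + 1.0 * pruned_loss.
    if batch_idx_train >= warm_step:
        s, p = simple_loss_scale, 1.0
    else:
        # Assumed linear ramp: lean on the simple (non-pruned) loss early on.
        frac = batch_idx_train / warm_step
        s = 1.0 - frac * (1.0 - simple_loss_scale)  # 1.0 -> 0.5
        p = 0.1 + 0.9 * frac                        # 0.1 -> 1.0
    return s * simple_loss + p * pruned_loss


# Batch 4400 above: prints 0.17168, i.e. the logged tot_loss loss=0.1717.
print(total_transducer_loss(0.2411, 0.05113, batch_idx_train=101600))
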
], batch size: 49, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 11:46:15,936 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101920.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 11:46:16,439 INFO [finetune.py:976] (0/7) Epoch 18, batch 4550, loss[loss=0.2031, simple_loss=0.2723, pruned_loss=0.06698, over 4913.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2479, pruned_loss=0.05319, over 952467.02 frames. ], batch size: 36, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 11:46:25,177 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6855, 1.2991, 1.3050, 1.3936, 1.8805, 1.4899, 1.1901, 1.2465], + device='cuda:0'), covar=tensor([0.1483, 0.1373, 0.1798, 0.1172, 0.0783, 0.1308, 0.1985, 0.2234], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0314, 0.0353, 0.0291, 0.0331, 0.0309, 0.0303, 0.0372], + device='cuda:0'), out_proj_covar=tensor([6.3955e-05, 6.5322e-05, 7.4994e-05, 5.9067e-05, 6.8829e-05, 6.5012e-05, + 6.3770e-05, 7.9291e-05], device='cuda:0') +2023-04-27 11:46:30,203 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-27 11:46:31,161 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2555, 1.4530, 1.4787, 1.7179, 1.5983, 1.9408, 1.3759, 3.6599], + device='cuda:0'), covar=tensor([0.0617, 0.0823, 0.0805, 0.1209, 0.0682, 0.0522, 0.0807, 0.0135], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0039, 0.0037, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0012, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 11:46:38,530 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.069e+02 1.683e+02 1.915e+02 2.320e+02 4.613e+02, threshold=3.831e+02, percent-clipped=2.0 +2023-04-27 11:46:49,877 INFO [finetune.py:976] (0/7) Epoch 18, batch 4600, loss[loss=0.2205, simple_loss=0.2868, pruned_loss=0.07713, over 4230.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2472, pruned_loss=0.05236, over 952126.20 frames. ], batch size: 65, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 11:46:56,085 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=101981.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 11:46:59,716 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=101987.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:47:07,711 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-102000.pt +2023-04-27 11:47:13,630 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4183, 1.8032, 1.7918, 2.1862, 1.9965, 2.1257, 1.7069, 4.4820], + device='cuda:0'), covar=tensor([0.0520, 0.0760, 0.0750, 0.1074, 0.0612, 0.0537, 0.0705, 0.0093], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 11:47:24,660 INFO [finetune.py:976] (0/7) Epoch 18, batch 4650, loss[loss=0.1616, simple_loss=0.2348, pruned_loss=0.04422, over 4799.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2461, pruned_loss=0.05237, over 954400.09 frames. 
], batch size: 29, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 11:47:26,012 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8781, 2.3147, 1.9757, 2.1368, 1.6949, 1.9002, 1.9099, 1.4250], + device='cuda:0'), covar=tensor([0.1719, 0.1257, 0.0736, 0.1110, 0.3162, 0.1162, 0.1725, 0.2584], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0304, 0.0217, 0.0279, 0.0310, 0.0258, 0.0248, 0.0265], + device='cuda:0'), out_proj_covar=tensor([1.1439e-04, 1.2082e-04, 8.6081e-05, 1.1062e-04, 1.2582e-04, 1.0220e-04, + 1.0027e-04, 1.0516e-04], device='cuda:0') +2023-04-27 11:47:36,303 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102040.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:47:41,241 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102048.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:47:45,410 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.596e+02 1.902e+02 2.192e+02 3.719e+02, threshold=3.804e+02, percent-clipped=0.0 +2023-04-27 11:47:58,101 INFO [finetune.py:976] (0/7) Epoch 18, batch 4700, loss[loss=0.1769, simple_loss=0.2451, pruned_loss=0.05434, over 4922.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2428, pruned_loss=0.0516, over 953138.41 frames. ], batch size: 37, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 11:48:18,214 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7891, 3.6181, 2.8304, 4.3701, 3.7254, 3.7451, 1.5837, 3.7239], + device='cuda:0'), covar=tensor([0.1920, 0.1372, 0.3612, 0.1390, 0.4069, 0.1851, 0.6345, 0.2652], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0215, 0.0251, 0.0304, 0.0299, 0.0250, 0.0273, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 11:48:30,316 INFO [finetune.py:976] (0/7) Epoch 18, batch 4750, loss[loss=0.1632, simple_loss=0.2471, pruned_loss=0.0396, over 4807.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2413, pruned_loss=0.05101, over 953699.82 frames. ], batch size: 45, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 11:48:50,258 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8456, 1.5295, 1.4137, 1.6422, 2.0498, 1.5965, 1.4023, 1.3186], + device='cuda:0'), covar=tensor([0.1512, 0.1360, 0.1946, 0.1234, 0.0927, 0.1628, 0.1994, 0.2262], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0312, 0.0352, 0.0290, 0.0330, 0.0308, 0.0302, 0.0370], + device='cuda:0'), out_proj_covar=tensor([6.3364e-05, 6.4759e-05, 7.4857e-05, 5.8864e-05, 6.8523e-05, 6.4773e-05, + 6.3523e-05, 7.8886e-05], device='cuda:0') +2023-04-27 11:48:51,940 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.496e+02 1.838e+02 2.058e+02 4.267e+02, threshold=3.676e+02, percent-clipped=2.0 +2023-04-27 11:49:09,300 INFO [finetune.py:976] (0/7) Epoch 18, batch 4800, loss[loss=0.1878, simple_loss=0.2805, pruned_loss=0.04756, over 4900.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2451, pruned_loss=0.05219, over 954763.52 frames. 
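
The attn_weights_entropy dumps from zipformer.py are diagnostics: each row of eight values is one entropy per attention head, a measure of how peaked or diffuse that head's attention is, while the accompanying covar / in_proj_covar / out_proj_covar tensors track statistics of the associated projections. A plausible sketch of the entropy part (illustrative only; the actual zipformer.py hook is not reproduced here):

import torch


def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
    # attn_weights: (num_heads, num_queries, num_keys), rows summing to 1.
    # Returns one value per head: the entropy in nats of its attention
    # distribution, averaged over queries.  Near 0 means attention collapsed
    # onto a single key; near log(num_keys) means a nearly uniform spread.
    eps = 1.0e-20
    entropy = -(attn_weights * (attn_weights + eps).log()).sum(dim=-1)
    return entropy.mean(dim=-1)


# Eight heads over sixteen key positions, like the 8-entry rows above:
weights = torch.softmax(torch.randn(8, 4, 16), dim=-1)
print(attn_weights_entropy(weights))
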
], batch size: 43, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:49:18,698 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6871, 1.3425, 1.8440, 2.1089, 1.7297, 1.6645, 1.7736, 1.7200], + device='cuda:0'), covar=tensor([0.4796, 0.6907, 0.6701, 0.5834, 0.6041, 0.8084, 0.8366, 0.8724], + device='cuda:0'), in_proj_covar=tensor([0.0422, 0.0407, 0.0499, 0.0501, 0.0451, 0.0476, 0.0484, 0.0488], + device='cuda:0'), out_proj_covar=tensor([1.0171e-04, 9.9959e-05, 1.1213e-04, 1.1951e-04, 1.0805e-04, 1.1427e-04, + 1.1464e-04, 1.1523e-04], device='cuda:0') +2023-04-27 11:49:54,637 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1693, 2.1008, 1.9433, 1.8236, 2.2948, 1.8401, 2.8255, 1.7096], + device='cuda:0'), covar=tensor([0.3712, 0.2072, 0.4257, 0.3256, 0.1642, 0.2484, 0.1217, 0.4101], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0349, 0.0430, 0.0359, 0.0385, 0.0384, 0.0373, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:50:15,446 INFO [finetune.py:976] (0/7) Epoch 18, batch 4850, loss[loss=0.1913, simple_loss=0.276, pruned_loss=0.05336, over 4767.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2497, pruned_loss=0.05338, over 954501.92 frames. ], batch size: 54, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:50:39,271 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 11:50:58,314 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.240e+02 1.715e+02 2.079e+02 2.399e+02 4.707e+02, threshold=4.157e+02, percent-clipped=1.0 +2023-04-27 11:51:19,609 INFO [finetune.py:976] (0/7) Epoch 18, batch 4900, loss[loss=0.1824, simple_loss=0.2558, pruned_loss=0.05449, over 4925.00 frames. ], tot_loss[loss=0.1798, simple_loss=0.2515, pruned_loss=0.05406, over 955422.86 frames. ], batch size: 41, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:51:23,227 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102276.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 11:51:31,596 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-27 11:52:04,380 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3135, 3.2135, 0.7837, 1.7690, 1.6247, 2.2973, 1.7481, 0.9822], + device='cuda:0'), covar=tensor([0.1412, 0.0891, 0.2054, 0.1280, 0.1124, 0.0951, 0.1582, 0.1948], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0245, 0.0138, 0.0121, 0.0133, 0.0154, 0.0119, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 11:52:25,539 INFO [finetune.py:976] (0/7) Epoch 18, batch 4950, loss[loss=0.1445, simple_loss=0.2244, pruned_loss=0.03229, over 4677.00 frames. ], tot_loss[loss=0.1807, simple_loss=0.2529, pruned_loss=0.05428, over 953270.59 frames. 
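
grad_scale is worth watching here: it sits at 32.0 through batch 4750, doubles to 64.0 at batch 4800 above, and is back to 32.0 by batch 5400 further down. That is the signature of dynamic fp16 loss scaling, which doubles the scale after a stretch of overflow-free steps and halves it whenever a gradient goes inf/nan. A sketch using PyTorch's stock scaler; icefall carries its own GradScaler variant, so the class choice and every constant below are assumptions picked to match the logged values.

import torch

scaler = torch.cuda.amp.GradScaler(
    init_scale=32.0,       # the value logged up to batch 4750
    growth_factor=2.0,     # 32.0 -> 64.0, as logged from batch 4800
    backoff_factor=0.5,    # 64.0 -> 32.0 again by batch 5400 (overflow hit)
    growth_interval=2000,  # clean steps required before the next doubling
)

# Typical step, assuming model / optimizer / criterion / batch exist:
#   with torch.cuda.amp.autocast():
#       loss = criterion(model(batch))
#   scaler.scale(loss).backward()
#   scaler.step(optimizer)   # silently skipped if grads overflowed
#   scaler.update()          # halves or doubles the scale as described
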
], batch size: 23, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:52:50,469 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102340.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:52:57,562 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=102343.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:53:04,800 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.066e+02 1.629e+02 1.929e+02 2.326e+02 4.533e+02, threshold=3.857e+02, percent-clipped=1.0 +2023-04-27 11:53:14,474 INFO [finetune.py:976] (0/7) Epoch 18, batch 5000, loss[loss=0.1624, simple_loss=0.2323, pruned_loss=0.04627, over 4799.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2501, pruned_loss=0.05364, over 954149.07 frames. ], batch size: 51, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:53:28,301 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=102388.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:53:48,401 INFO [finetune.py:976] (0/7) Epoch 18, batch 5050, loss[loss=0.1399, simple_loss=0.2195, pruned_loss=0.03015, over 4810.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2481, pruned_loss=0.05357, over 954592.47 frames. ], batch size: 51, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:54:23,901 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.725e+01 1.630e+02 1.921e+02 2.242e+02 3.810e+02, threshold=3.842e+02, percent-clipped=0.0 +2023-04-27 11:54:45,116 INFO [finetune.py:976] (0/7) Epoch 18, batch 5100, loss[loss=0.1272, simple_loss=0.203, pruned_loss=0.02572, over 4761.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2433, pruned_loss=0.0516, over 955425.34 frames. ], batch size: 28, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:54:53,121 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-27 11:55:18,947 INFO [finetune.py:976] (0/7) Epoch 18, batch 5150, loss[loss=0.1832, simple_loss=0.235, pruned_loss=0.06565, over 4145.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2438, pruned_loss=0.05221, over 953820.17 frames. ], batch size: 18, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:55:42,237 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5853, 1.1270, 1.3083, 1.1517, 1.6915, 1.4074, 1.0801, 1.3101], + device='cuda:0'), covar=tensor([0.1714, 0.1611, 0.2407, 0.1791, 0.1018, 0.1540, 0.2319, 0.2343], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0312, 0.0353, 0.0291, 0.0331, 0.0310, 0.0303, 0.0371], + device='cuda:0'), out_proj_covar=tensor([6.3885e-05, 6.4890e-05, 7.4814e-05, 5.9048e-05, 6.8666e-05, 6.5177e-05, + 6.3788e-05, 7.9124e-05], device='cuda:0') +2023-04-27 11:55:52,831 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.715e+02 2.103e+02 2.437e+02 3.988e+02, threshold=4.205e+02, percent-clipped=0.0 +2023-04-27 11:56:08,073 INFO [finetune.py:976] (0/7) Epoch 18, batch 5200, loss[loss=0.1604, simple_loss=0.243, pruned_loss=0.03886, over 4898.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2487, pruned_loss=0.0538, over 954589.26 frames. ], batch size: 37, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:56:11,156 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102576.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 11:56:25,407 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.84 vs. 
limit=5.0 +2023-04-27 11:56:27,490 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6817, 2.0119, 1.6826, 1.3880, 1.2651, 1.2587, 1.7195, 1.1781], + device='cuda:0'), covar=tensor([0.1696, 0.1304, 0.1350, 0.1762, 0.2385, 0.1839, 0.1004, 0.2064], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0214, 0.0169, 0.0206, 0.0201, 0.0186, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 11:56:42,077 INFO [finetune.py:976] (0/7) Epoch 18, batch 5250, loss[loss=0.2217, simple_loss=0.2935, pruned_loss=0.07497, over 4899.00 frames. ], tot_loss[loss=0.1782, simple_loss=0.2492, pruned_loss=0.05358, over 952368.08 frames. ], batch size: 37, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:56:42,797 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1159, 2.6167, 1.0781, 1.4868, 2.0903, 1.3998, 3.4443, 1.8578], + device='cuda:0'), covar=tensor([0.0609, 0.0565, 0.0828, 0.1226, 0.0498, 0.0929, 0.0195, 0.0591], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 11:56:43,956 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=102624.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 11:56:45,166 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3402, 2.0683, 2.3865, 2.7406, 2.8448, 2.1963, 1.9417, 2.3281], + device='cuda:0'), covar=tensor([0.0890, 0.1073, 0.0718, 0.0655, 0.0621, 0.0927, 0.0803, 0.0625], + device='cuda:0'), in_proj_covar=tensor([0.0189, 0.0201, 0.0182, 0.0171, 0.0177, 0.0181, 0.0150, 0.0178], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 11:56:56,934 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=102643.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:57:11,272 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.529e+02 1.797e+02 2.306e+02 3.318e+02, threshold=3.594e+02, percent-clipped=0.0 +2023-04-27 11:57:26,879 INFO [finetune.py:976] (0/7) Epoch 18, batch 5300, loss[loss=0.1532, simple_loss=0.2402, pruned_loss=0.0331, over 4778.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2503, pruned_loss=0.05381, over 954558.94 frames. ], batch size: 29, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:57:55,086 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=102691.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 11:58:31,734 INFO [finetune.py:976] (0/7) Epoch 18, batch 5350, loss[loss=0.1504, simple_loss=0.2324, pruned_loss=0.03419, over 4735.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2498, pruned_loss=0.0529, over 954641.97 frames. ], batch size: 54, lr: 3.32e-03, grad_scale: 64.0 +2023-04-27 11:58:41,905 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.89 vs. 
limit=5.0 +2023-04-27 11:58:50,038 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1981, 1.5597, 1.6584, 1.8188, 1.7446, 1.8732, 1.7177, 1.7352], + device='cuda:0'), covar=tensor([0.3323, 0.4740, 0.4256, 0.3906, 0.4882, 0.6098, 0.4661, 0.4724], + device='cuda:0'), in_proj_covar=tensor([0.0331, 0.0372, 0.0319, 0.0331, 0.0343, 0.0392, 0.0355, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 11:59:00,896 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5074, 1.5947, 1.4236, 1.0419, 1.1756, 1.1198, 1.3742, 1.1206], + device='cuda:0'), covar=tensor([0.1780, 0.1339, 0.1573, 0.1906, 0.2487, 0.2031, 0.1171, 0.2142], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0213, 0.0168, 0.0206, 0.0201, 0.0185, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 11:59:22,079 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.383e+01 1.595e+02 1.837e+02 2.151e+02 4.228e+02, threshold=3.674e+02, percent-clipped=3.0 +2023-04-27 11:59:37,386 INFO [finetune.py:976] (0/7) Epoch 18, batch 5400, loss[loss=0.1494, simple_loss=0.2207, pruned_loss=0.03905, over 4793.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2477, pruned_loss=0.05278, over 954064.67 frames. ], batch size: 26, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 12:00:39,491 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5493, 1.5221, 4.2988, 3.9925, 3.7529, 4.0905, 4.0565, 3.7617], + device='cuda:0'), covar=tensor([0.6989, 0.5931, 0.1092, 0.1745, 0.1154, 0.1543, 0.1241, 0.1569], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0311, 0.0411, 0.0413, 0.0355, 0.0412, 0.0317, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:00:40,216 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.54 vs. limit=5.0 +2023-04-27 12:00:49,559 INFO [finetune.py:976] (0/7) Epoch 18, batch 5450, loss[loss=0.1631, simple_loss=0.2306, pruned_loss=0.04782, over 4824.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2443, pruned_loss=0.05184, over 954533.23 frames. ], batch size: 39, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 12:01:23,542 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102851.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:01:26,510 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-27 12:01:33,555 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.567e+02 1.920e+02 2.316e+02 4.548e+02, threshold=3.840e+02, percent-clipped=3.0 +2023-04-27 12:01:42,109 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.46 vs. limit=5.0 +2023-04-27 12:01:44,220 INFO [finetune.py:976] (0/7) Epoch 18, batch 5500, loss[loss=0.1807, simple_loss=0.2562, pruned_loss=0.05259, over 4812.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2416, pruned_loss=0.05093, over 953382.32 frames. 
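
The "Whitening: num_groups=..., num_channels=..., metric=M vs. limit=L" lines from scaling.py report how far a module's activations are from having a white (identity-like) covariance within each channel group: the metric is 1.0 for perfectly white features and grows with the eigenvalue spread, and the log fires when it is compared against the limit (2.0 for the 8-group modules above, 5.0 for the 1-group ones). One plausible reconstruction of the metric; the exact scaling.py formula and its penalty gradient are not reproduced here.

import torch


def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
    # x: (num_frames, num_channels), num_channels divisible by num_groups.
    # Per group: cpg * sum(eig^2) / sum(eig)^2 of the covariance, computed
    # trace-wise without an eigendecomposition; equals 1.0 iff all
    # eigenvalues are equal, i.e. the features are white up to scale.
    num_frames, num_channels = x.shape
    cpg = num_channels // num_groups  # channels per group
    x = x.reshape(num_frames, num_groups, cpg).permute(1, 0, 2)
    cov = torch.matmul(x.transpose(1, 2), x) / num_frames
    trace = cov.diagonal(dim1=1, dim2=2).sum(-1)              # sum of eigs
    trace_sq = (cov @ cov).diagonal(dim1=1, dim2=2).sum(-1)   # sum of eigs^2
    return (cpg * trace_sq / trace.pow(2)).mean()


# Nearly-white input: prints a value close to 1.0, well under limit=2.0.
print(whitening_metric(torch.randn(1000, 96), num_groups=8))
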
], batch size: 51, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 12:01:59,954 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=102887.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:02:17,481 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102912.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:02:23,854 INFO [finetune.py:976] (0/7) Epoch 18, batch 5550, loss[loss=0.1455, simple_loss=0.2271, pruned_loss=0.03189, over 4756.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2427, pruned_loss=0.05079, over 954306.24 frames. ], batch size: 28, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 12:02:40,493 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=102948.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:02:45,152 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.541e+02 1.915e+02 2.352e+02 3.794e+02, threshold=3.830e+02, percent-clipped=0.0 +2023-04-27 12:02:53,923 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1877, 2.1414, 1.7551, 1.8264, 2.1569, 1.7821, 2.7105, 1.5674], + device='cuda:0'), covar=tensor([0.3508, 0.1969, 0.4193, 0.3128, 0.1858, 0.2451, 0.1342, 0.4134], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0347, 0.0429, 0.0358, 0.0382, 0.0383, 0.0372, 0.0423], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:02:54,416 INFO [finetune.py:976] (0/7) Epoch 18, batch 5600, loss[loss=0.1951, simple_loss=0.2763, pruned_loss=0.05698, over 4817.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2455, pruned_loss=0.05135, over 952225.65 frames. ], batch size: 40, lr: 3.32e-03, grad_scale: 32.0 +2023-04-27 12:03:24,718 INFO [finetune.py:976] (0/7) Epoch 18, batch 5650, loss[loss=0.1611, simple_loss=0.2425, pruned_loss=0.0398, over 4922.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2486, pruned_loss=0.05222, over 951703.76 frames. ], batch size: 38, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:03:31,232 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-04-27 12:03:39,866 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1114, 2.4442, 2.0500, 1.8847, 1.6701, 1.6738, 2.0836, 1.5494], + device='cuda:0'), covar=tensor([0.1553, 0.1356, 0.1295, 0.1602, 0.2184, 0.1942, 0.0963, 0.1849], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0212, 0.0168, 0.0205, 0.0200, 0.0185, 0.0155, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 12:03:46,255 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.096e+02 1.540e+02 1.863e+02 2.247e+02 6.465e+02, threshold=3.725e+02, percent-clipped=2.0 +2023-04-27 12:03:55,213 INFO [finetune.py:976] (0/7) Epoch 18, batch 5700, loss[loss=0.1273, simple_loss=0.1902, pruned_loss=0.03216, over 4353.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2442, pruned_loss=0.05168, over 933794.93 frames. 
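
The warmup_begin / warmup_end / num_to_drop / layers_to_drop lines come from zipformer's whole-layer dropout, where entire encoder layers are skipped for a batch. Each stack has its own staggered warmup window (666.7-1333.3, 1333.3-2000.0, and so on), and the drop probability decays to a small residual rate after warmup, which is why at batch_count near 1e5 most batches drop nothing and an occasional one drops a single layer. A sketch of such a schedule; the 0.5 and 0.05 probabilities are assumptions, only the overall shape is taken from the log.

import random


def layers_to_drop(num_layers: int, batch_count: float,
                   warmup_begin: float, warmup_end: float,
                   initial_prob: float = 0.5,
                   final_prob: float = 0.05) -> set:
    # Per-layer drop probability: linear decay across the warmup window,
    # then a constant small residual rate for the rest of training.
    if batch_count < warmup_begin:
        p = initial_prob
    elif batch_count > warmup_end:
        p = final_prob
    else:
        frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
        p = initial_prob + frac * (final_prob - initial_prob)
    return {i for i in range(num_layers) if random.random() < p}


# Long past warmup: usually set(), occasionally {0} or {2}, as in the log.
print(layers_to_drop(4, batch_count=103000.0,
                     warmup_begin=1333.3, warmup_end=2000.0))
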
], batch size: 19, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:04:12,203 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-18.pt +2023-04-27 12:04:24,063 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103098.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:04:24,531 INFO [finetune.py:976] (0/7) Epoch 19, batch 0, loss[loss=0.1949, simple_loss=0.2603, pruned_loss=0.06472, over 4817.00 frames. ], tot_loss[loss=0.1949, simple_loss=0.2603, pruned_loss=0.06472, over 4817.00 frames. ], batch size: 47, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:04:24,533 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 12:04:35,098 INFO [finetune.py:1010] (0/7) Epoch 19, validation: loss=0.1545, simple_loss=0.2248, pruned_loss=0.04209, over 2265189.00 frames. +2023-04-27 12:04:35,098 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 12:04:56,254 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103132.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:05:02,857 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-27 12:05:07,368 INFO [finetune.py:976] (0/7) Epoch 19, batch 50, loss[loss=0.1854, simple_loss=0.2553, pruned_loss=0.05773, over 4737.00 frames. ], tot_loss[loss=0.1839, simple_loss=0.2536, pruned_loss=0.05709, over 214549.87 frames. ], batch size: 54, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:05:12,640 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-27 12:05:13,477 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.545e+02 1.855e+02 2.320e+02 4.324e+02, threshold=3.710e+02, percent-clipped=2.0 +2023-04-27 12:05:15,441 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103159.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:05:57,685 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103193.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:06:01,236 INFO [finetune.py:976] (0/7) Epoch 19, batch 100, loss[loss=0.1615, simple_loss=0.2264, pruned_loss=0.04834, over 4836.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2454, pruned_loss=0.05274, over 380782.58 frames. ], batch size: 47, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:06:13,376 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103207.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:06:56,242 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103243.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:07:06,071 INFO [finetune.py:976] (0/7) Epoch 19, batch 150, loss[loss=0.1487, simple_loss=0.2075, pruned_loss=0.04496, over 4835.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2409, pruned_loss=0.05123, over 508787.91 frames. 
], batch size: 40, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:07:16,419 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.567e+02 1.850e+02 2.251e+02 4.056e+02, threshold=3.701e+02, percent-clipped=1.0 +2023-04-27 12:07:35,080 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8233, 1.9861, 1.9149, 2.3787, 2.2253, 2.2597, 1.9348, 4.8012], + device='cuda:0'), covar=tensor([0.0489, 0.0729, 0.0725, 0.1010, 0.0563, 0.0481, 0.0660, 0.0089], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 12:07:45,051 INFO [finetune.py:976] (0/7) Epoch 19, batch 200, loss[loss=0.201, simple_loss=0.2718, pruned_loss=0.06505, over 4916.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2404, pruned_loss=0.05186, over 606499.65 frames. ], batch size: 36, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:07:48,058 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2413, 1.6467, 2.1847, 2.6586, 2.1598, 1.6893, 1.4869, 1.9102], + device='cuda:0'), covar=tensor([0.2997, 0.3112, 0.1616, 0.1993, 0.2422, 0.2531, 0.4041, 0.1913], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0246, 0.0226, 0.0313, 0.0219, 0.0231, 0.0227, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 12:08:14,338 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9980, 2.6389, 1.0122, 1.4363, 1.8486, 1.2641, 3.3660, 1.8531], + device='cuda:0'), covar=tensor([0.0653, 0.0548, 0.0794, 0.1189, 0.0515, 0.0960, 0.0221, 0.0581], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0052, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 12:08:33,911 INFO [finetune.py:976] (0/7) Epoch 19, batch 250, loss[loss=0.1413, simple_loss=0.2186, pruned_loss=0.032, over 4901.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2424, pruned_loss=0.05194, over 684095.65 frames. ], batch size: 32, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:08:44,151 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.956e+01 1.630e+02 1.978e+02 2.388e+02 4.596e+02, threshold=3.957e+02, percent-clipped=1.0 +2023-04-27 12:08:56,545 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103366.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 12:08:58,384 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9026, 2.4573, 1.0213, 1.2958, 1.7912, 1.2255, 2.9671, 1.5870], + device='cuda:0'), covar=tensor([0.0697, 0.0528, 0.0758, 0.1260, 0.0469, 0.0985, 0.0248, 0.0631], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0048, 0.0046, 0.0050, 0.0052, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 12:09:14,225 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-04-27 12:09:22,446 INFO [finetune.py:976] (0/7) Epoch 19, batch 300, loss[loss=0.1802, simple_loss=0.2458, pruned_loss=0.05729, over 4768.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2461, pruned_loss=0.05257, over 743709.31 frames. 
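
The frame counts attached to tot_loss reveal how it is aggregated: they reset at the epoch boundary (4817.00 frames at epoch-19 batch 0 above), climb through ~215k at batch 50 and ~744k by batch 300, and saturate near ~955k late in an epoch, as throughout epoch 18. That is the fingerprint of an exponentially decayed running sum at roughly 4.7k frames per batch with a decay factor of about 1 - 1/200; both numbers are inferred from the log itself, not read from the code. A sketch under those assumptions:

class DecayedLossTracker:
    # Decayed running sums of (loss, frames); the printed tot_loss is their
    # ratio.  Reset at each epoch boundary.
    def __init__(self, reset_interval: int = 200):
        self.decay = 1.0 - 1.0 / reset_interval
        self.loss_sum = 0.0
        self.frames = 0.0

    def update(self, batch_loss_sum: float, batch_frames: float) -> float:
        self.loss_sum = self.loss_sum * self.decay + batch_loss_sum
        self.frames = self.frames * self.decay + batch_frames
        return self.loss_sum / self.frames  # the logged tot_loss value


tracker = DecayedLossTracker()
for _ in range(51):  # batches 0..50 of a fresh epoch
    tracker.update(batch_loss_sum=0.18 * 4700, batch_frames=4700.0)
print(tracker.frames)  # ~2.1e5, cf. "over 214549.87 frames" at batch 50
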
], batch size: 26, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:09:42,600 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103427.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 12:09:55,319 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6405, 1.5383, 1.9139, 2.0559, 1.5550, 1.3718, 1.7206, 1.1159], + device='cuda:0'), covar=tensor([0.0511, 0.0703, 0.0441, 0.0480, 0.0728, 0.1075, 0.0660, 0.0648], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0067, 0.0067, 0.0075, 0.0094, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:09:55,811 INFO [finetune.py:976] (0/7) Epoch 19, batch 350, loss[loss=0.1654, simple_loss=0.2296, pruned_loss=0.05062, over 4856.00 frames. ], tot_loss[loss=0.1792, simple_loss=0.2497, pruned_loss=0.05432, over 792267.30 frames. ], batch size: 31, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:09:58,942 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103454.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:10:00,584 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.607e+02 1.938e+02 2.366e+02 5.284e+02, threshold=3.875e+02, percent-clipped=4.0 +2023-04-27 12:10:22,415 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103488.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:10:23,680 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5887, 1.7906, 1.7186, 2.3775, 2.6721, 2.1339, 1.9783, 1.8438], + device='cuda:0'), covar=tensor([0.1708, 0.1712, 0.2144, 0.1467, 0.1114, 0.1818, 0.2178, 0.2279], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0310, 0.0349, 0.0288, 0.0328, 0.0306, 0.0301, 0.0367], + device='cuda:0'), out_proj_covar=tensor([6.3112e-05, 6.4461e-05, 7.3942e-05, 5.8483e-05, 6.8205e-05, 6.4228e-05, + 6.3217e-05, 7.8292e-05], device='cuda:0') +2023-04-27 12:10:24,894 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9966, 1.0235, 1.2337, 1.1629, 0.9781, 0.9087, 0.9735, 0.5519], + device='cuda:0'), covar=tensor([0.0594, 0.0553, 0.0441, 0.0554, 0.0745, 0.1203, 0.0447, 0.0676], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0068, 0.0067, 0.0075, 0.0095, 0.0073, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:10:29,038 INFO [finetune.py:976] (0/7) Epoch 19, batch 400, loss[loss=0.1567, simple_loss=0.2155, pruned_loss=0.04899, over 4245.00 frames. ], tot_loss[loss=0.1796, simple_loss=0.2503, pruned_loss=0.05444, over 827301.87 frames. 
], batch size: 18, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:10:34,536 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103507.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:10:58,902 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103543.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:10:58,931 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3351, 1.7229, 1.6373, 2.1941, 2.3738, 1.9862, 1.9317, 1.7618], + device='cuda:0'), covar=tensor([0.1766, 0.1756, 0.1969, 0.1600, 0.1306, 0.2053, 0.2443, 0.2208], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0311, 0.0349, 0.0289, 0.0329, 0.0306, 0.0301, 0.0368], + device='cuda:0'), out_proj_covar=tensor([6.3190e-05, 6.4551e-05, 7.4044e-05, 5.8551e-05, 6.8299e-05, 6.4365e-05, + 6.3264e-05, 7.8413e-05], device='cuda:0') +2023-04-27 12:11:02,472 INFO [finetune.py:976] (0/7) Epoch 19, batch 450, loss[loss=0.1456, simple_loss=0.2074, pruned_loss=0.04187, over 4791.00 frames. ], tot_loss[loss=0.1786, simple_loss=0.2492, pruned_loss=0.05395, over 855460.85 frames. ], batch size: 51, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:11:06,186 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=103555.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:11:06,728 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.516e+02 1.781e+02 2.073e+02 5.569e+02, threshold=3.563e+02, percent-clipped=1.0 +2023-04-27 12:11:31,860 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1462, 2.4378, 2.2257, 2.4345, 1.7716, 2.1153, 2.2696, 1.7592], + device='cuda:0'), covar=tensor([0.1463, 0.0967, 0.0614, 0.0730, 0.2769, 0.0944, 0.1477, 0.1962], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0303, 0.0218, 0.0279, 0.0311, 0.0259, 0.0251, 0.0265], + device='cuda:0'), out_proj_covar=tensor([1.1488e-04, 1.2061e-04, 8.6466e-05, 1.1062e-04, 1.2621e-04, 1.0268e-04, + 1.0130e-04, 1.0518e-04], device='cuda:0') +2023-04-27 12:11:42,142 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=103591.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:11:46,989 INFO [finetune.py:976] (0/7) Epoch 19, batch 500, loss[loss=0.188, simple_loss=0.2535, pruned_loss=0.06124, over 4900.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2473, pruned_loss=0.05365, over 876910.38 frames. 
], batch size: 35, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:11:53,267 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1237, 2.6375, 1.1009, 1.3965, 2.0095, 1.2455, 3.5216, 1.6976], + device='cuda:0'), covar=tensor([0.0696, 0.0614, 0.0851, 0.1512, 0.0563, 0.1185, 0.0324, 0.0732], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 12:12:13,935 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7182, 2.0442, 1.6688, 1.4014, 1.3361, 1.3042, 1.6827, 1.2456], + device='cuda:0'), covar=tensor([0.1594, 0.1251, 0.1386, 0.1765, 0.2264, 0.1921, 0.0963, 0.2010], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0213, 0.0168, 0.0205, 0.0200, 0.0185, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 12:12:30,577 INFO [finetune.py:976] (0/7) Epoch 19, batch 550, loss[loss=0.196, simple_loss=0.2564, pruned_loss=0.06782, over 4907.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2446, pruned_loss=0.05275, over 894158.29 frames. ], batch size: 43, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:12:34,854 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.551e+02 1.854e+02 2.248e+02 4.068e+02, threshold=3.707e+02, percent-clipped=1.0 +2023-04-27 12:13:04,202 INFO [finetune.py:976] (0/7) Epoch 19, batch 600, loss[loss=0.2031, simple_loss=0.2738, pruned_loss=0.06624, over 4745.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2457, pruned_loss=0.05352, over 907547.19 frames. ], batch size: 59, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:13:04,904 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5949, 2.3958, 2.7251, 3.0436, 2.9416, 2.4381, 2.0374, 2.6294], + device='cuda:0'), covar=tensor([0.0863, 0.0859, 0.0499, 0.0502, 0.0607, 0.0830, 0.0743, 0.0565], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0196, 0.0177, 0.0167, 0.0173, 0.0177, 0.0148, 0.0174], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:13:16,156 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0358, 1.3090, 1.2393, 1.5627, 1.4393, 1.4517, 1.3128, 2.4561], + device='cuda:0'), covar=tensor([0.0654, 0.0824, 0.0813, 0.1262, 0.0652, 0.0553, 0.0759, 0.0229], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0014, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 12:13:19,610 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=103722.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 12:13:43,203 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103740.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:13:45,623 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1819, 1.5477, 1.4661, 2.0150, 2.2282, 1.7995, 1.7781, 1.5817], + device='cuda:0'), covar=tensor([0.2013, 0.1982, 0.2220, 0.1599, 0.1443, 0.2182, 0.2609, 0.2524], + device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0311, 0.0349, 0.0288, 0.0328, 0.0307, 0.0300, 0.0367], + device='cuda:0'), out_proj_covar=tensor([6.3141e-05, 6.4660e-05, 7.3933e-05, 5.8410e-05, 6.8134e-05, 6.4423e-05, + 6.3193e-05, 
7.8229e-05], device='cuda:0') +2023-04-27 12:13:53,494 INFO [finetune.py:976] (0/7) Epoch 19, batch 650, loss[loss=0.1288, simple_loss=0.1916, pruned_loss=0.03304, over 4828.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2468, pruned_loss=0.05347, over 918760.78 frames. ], batch size: 25, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:14:01,764 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103754.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:14:02,876 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.593e+02 1.866e+02 2.212e+02 4.115e+02, threshold=3.733e+02, percent-clipped=2.0 +2023-04-27 12:14:14,195 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103765.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:14:30,665 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=103788.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:14:37,245 INFO [finetune.py:976] (0/7) Epoch 19, batch 700, loss[loss=0.1381, simple_loss=0.2125, pruned_loss=0.03188, over 4755.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2466, pruned_loss=0.05308, over 928849.31 frames. ], batch size: 27, lr: 3.31e-03, grad_scale: 32.0 +2023-04-27 12:14:38,726 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103801.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:14:39,254 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=103802.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:14:53,008 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3134, 2.0747, 2.3969, 2.7967, 2.7802, 2.1955, 1.8693, 2.4387], + device='cuda:0'), covar=tensor([0.0805, 0.1048, 0.0586, 0.0529, 0.0571, 0.0858, 0.0772, 0.0560], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0196, 0.0177, 0.0167, 0.0173, 0.0177, 0.0147, 0.0173], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:14:55,304 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103826.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:15:02,433 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=103836.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:15:10,877 INFO [finetune.py:976] (0/7) Epoch 19, batch 750, loss[loss=0.1531, simple_loss=0.2225, pruned_loss=0.04183, over 4883.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.247, pruned_loss=0.05267, over 932754.99 frames. 
], batch size: 32, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:15:15,074 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.139e+02 1.609e+02 1.947e+02 2.389e+02 3.942e+02, threshold=3.894e+02, percent-clipped=2.0 +2023-04-27 12:15:21,247 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1292, 1.6768, 2.0604, 2.4184, 2.4598, 1.9689, 1.7386, 2.2411], + device='cuda:0'), covar=tensor([0.0743, 0.1254, 0.0701, 0.0542, 0.0581, 0.0860, 0.0784, 0.0529], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0196, 0.0177, 0.0167, 0.0173, 0.0177, 0.0148, 0.0173], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:15:28,404 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103876.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:15:44,006 INFO [finetune.py:976] (0/7) Epoch 19, batch 800, loss[loss=0.173, simple_loss=0.2405, pruned_loss=0.0528, over 4900.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2477, pruned_loss=0.05279, over 938497.35 frames. ], batch size: 32, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:15:45,974 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3921, 1.3137, 1.5975, 1.5557, 1.2959, 1.2267, 1.3402, 0.8239], + device='cuda:0'), covar=tensor([0.0583, 0.0511, 0.0458, 0.0532, 0.0795, 0.1110, 0.0546, 0.0619], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0067, 0.0067, 0.0074, 0.0094, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:16:08,595 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=103937.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 12:16:16,896 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3051, 1.2767, 3.8048, 3.5565, 3.3449, 3.6251, 3.6399, 3.3631], + device='cuda:0'), covar=tensor([0.6424, 0.5401, 0.1093, 0.1609, 0.1113, 0.1673, 0.1598, 0.1408], + device='cuda:0'), in_proj_covar=tensor([0.0302, 0.0303, 0.0400, 0.0403, 0.0345, 0.0402, 0.0310, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:16:17,416 INFO [finetune.py:976] (0/7) Epoch 19, batch 850, loss[loss=0.1896, simple_loss=0.2562, pruned_loss=0.06151, over 4859.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.246, pruned_loss=0.0527, over 938067.84 frames. ], batch size: 44, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:16:21,649 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.943e+01 1.500e+02 1.730e+02 2.145e+02 3.862e+02, threshold=3.461e+02, percent-clipped=0.0 +2023-04-27 12:16:27,888 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.51 vs. limit=5.0 +2023-04-27 12:16:31,040 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-27 12:16:53,726 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. 
limit=5.0 +2023-04-27 12:16:59,789 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103990.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:17:05,191 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=103997.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:17:10,830 INFO [finetune.py:976] (0/7) Epoch 19, batch 900, loss[loss=0.1637, simple_loss=0.2321, pruned_loss=0.04769, over 4841.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2435, pruned_loss=0.05172, over 942712.37 frames. ], batch size: 49, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:17:11,601 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-104000.pt +2023-04-27 12:17:37,465 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104022.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 12:17:57,236 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0 +2023-04-27 12:18:18,255 INFO [finetune.py:976] (0/7) Epoch 19, batch 950, loss[loss=0.1987, simple_loss=0.2543, pruned_loss=0.07153, over 4911.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2416, pruned_loss=0.05144, over 944889.38 frames. ], batch size: 37, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:18:18,347 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=104049.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:18:19,609 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104051.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:18:27,839 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.269e+02 1.627e+02 1.900e+02 2.232e+02 4.587e+02, threshold=3.799e+02, percent-clipped=1.0 +2023-04-27 12:18:29,213 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104058.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:18:32,319 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.91 vs. limit=5.0 +2023-04-27 12:18:41,643 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=104070.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 12:19:02,245 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4068, 1.5430, 1.4329, 1.7252, 1.5766, 1.9208, 1.4291, 3.5498], + device='cuda:0'), covar=tensor([0.0562, 0.0837, 0.0808, 0.1261, 0.0678, 0.0515, 0.0774, 0.0153], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 12:19:14,755 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104096.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:19:21,307 INFO [finetune.py:976] (0/7) Epoch 19, batch 1000, loss[loss=0.1365, simple_loss=0.2168, pruned_loss=0.02814, over 4777.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2449, pruned_loss=0.05225, over 947436.23 frames. 
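
Checkpoints in this run are written on two independent triggers: every 2000 training batches (checkpoint-102000.pt earlier, checkpoint-104000.pt just above) and once per epoch (epoch-18.pt). A trivial sketch of the batch-count trigger; what checkpoint.py actually serializes (model, optimizer, sampler state) is left abstract here.

def checkpoint_path(batch_idx_train: int, save_every_n: int = 2000):
    # Returns a filename on multiples of save_every_n, else None.  The
    # directory and name pattern are copied from the log lines above.
    if batch_idx_train > 0 and batch_idx_train % save_every_n == 0:
        return ("pruned_transducer_stateless7_streaming/exp2/"
                f"checkpoint-{batch_idx_train}.pt")
    return None


assert checkpoint_path(104000) is not None
assert checkpoint_path(104001) is None
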
], batch size: 26, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:19:33,861 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=104110.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:19:45,546 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104121.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:20:27,135 INFO [finetune.py:976] (0/7) Epoch 19, batch 1050, loss[loss=0.1328, simple_loss=0.2046, pruned_loss=0.0305, over 4776.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2482, pruned_loss=0.05257, over 949723.48 frames. ], batch size: 26, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:20:37,627 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.418e+01 1.570e+02 1.795e+02 2.236e+02 4.469e+02, threshold=3.589e+02, percent-clipped=1.0 +2023-04-27 12:21:05,398 INFO [finetune.py:976] (0/7) Epoch 19, batch 1100, loss[loss=0.1773, simple_loss=0.2622, pruned_loss=0.04623, over 4897.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2506, pruned_loss=0.05342, over 951834.51 frames. ], batch size: 46, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:21:27,609 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104232.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 12:21:39,375 INFO [finetune.py:976] (0/7) Epoch 19, batch 1150, loss[loss=0.1867, simple_loss=0.2608, pruned_loss=0.05632, over 4811.00 frames. ], tot_loss[loss=0.1784, simple_loss=0.2506, pruned_loss=0.05308, over 953450.66 frames. ], batch size: 33, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:21:39,465 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9289, 2.5347, 1.0247, 1.2579, 1.7925, 1.2243, 3.1481, 1.6465], + device='cuda:0'), covar=tensor([0.0871, 0.0910, 0.0995, 0.1574, 0.0633, 0.1299, 0.0328, 0.0897], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0074, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 12:21:44,616 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.137e+01 1.582e+02 1.911e+02 2.328e+02 3.877e+02, threshold=3.822e+02, percent-clipped=1.0 +2023-04-27 12:22:12,703 INFO [finetune.py:976] (0/7) Epoch 19, batch 1200, loss[loss=0.1377, simple_loss=0.2108, pruned_loss=0.03232, over 4848.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.248, pruned_loss=0.05252, over 953111.09 frames. 
], batch size: 49, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:22:15,761 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7756, 2.4533, 1.8836, 1.7770, 1.3481, 1.3670, 1.8782, 1.3192], + device='cuda:0'), covar=tensor([0.1700, 0.1289, 0.1421, 0.1769, 0.2418, 0.1952, 0.1000, 0.2099], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0212, 0.0168, 0.0206, 0.0201, 0.0185, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 12:22:29,825 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5092, 1.3739, 1.7582, 1.7454, 1.3530, 1.2716, 1.3973, 0.8494], + device='cuda:0'), covar=tensor([0.0549, 0.0623, 0.0362, 0.0554, 0.0761, 0.1043, 0.0660, 0.0645], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0067, 0.0067, 0.0074, 0.0095, 0.0073, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:22:38,333 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8992, 1.7740, 2.0591, 2.2981, 1.7191, 1.4758, 1.8454, 1.0244], + device='cuda:0'), covar=tensor([0.0551, 0.0700, 0.0609, 0.0753, 0.0784, 0.1118, 0.0699, 0.0812], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0067, 0.0067, 0.0075, 0.0095, 0.0074, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:22:44,233 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104346.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:22:46,459 INFO [finetune.py:976] (0/7) Epoch 19, batch 1250, loss[loss=0.167, simple_loss=0.2423, pruned_loss=0.04588, over 4825.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2449, pruned_loss=0.05134, over 953733.73 frames. ], batch size: 39, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:22:49,467 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104353.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:22:51,237 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.445e+01 1.483e+02 1.801e+02 2.223e+02 4.756e+02, threshold=3.603e+02, percent-clipped=1.0 +2023-04-27 12:22:55,360 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.3353, 4.1648, 3.1049, 4.9649, 4.3089, 4.2876, 1.7915, 4.1735], + device='cuda:0'), covar=tensor([0.1606, 0.0977, 0.3266, 0.0934, 0.2534, 0.1504, 0.5658, 0.2292], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0215, 0.0249, 0.0308, 0.0299, 0.0248, 0.0272, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 12:23:45,744 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104396.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:23:47,996 INFO [finetune.py:976] (0/7) Epoch 19, batch 1300, loss[loss=0.1674, simple_loss=0.2344, pruned_loss=0.05018, over 4824.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2425, pruned_loss=0.05079, over 954449.66 frames. 
], batch size: 33, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:23:56,149 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=104405.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:24:18,872 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104421.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:24:44,100 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-27 12:24:44,507 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=104444.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:24:53,498 INFO [finetune.py:976] (0/7) Epoch 19, batch 1350, loss[loss=0.2492, simple_loss=0.3135, pruned_loss=0.09246, over 4819.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2438, pruned_loss=0.05173, over 957057.44 frames. ], batch size: 39, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:25:03,934 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.562e+02 1.851e+02 2.195e+02 3.087e+02, threshold=3.702e+02, percent-clipped=0.0 +2023-04-27 12:25:23,559 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=104469.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:25:57,943 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2164, 1.6347, 2.0766, 2.5223, 2.1243, 1.6419, 1.3405, 1.8406], + device='cuda:0'), covar=tensor([0.3392, 0.3496, 0.1751, 0.2396, 0.2655, 0.2813, 0.4224, 0.2195], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0247, 0.0228, 0.0316, 0.0220, 0.0232, 0.0228, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 12:25:58,403 INFO [finetune.py:976] (0/7) Epoch 19, batch 1400, loss[loss=0.1811, simple_loss=0.2692, pruned_loss=0.04655, over 4935.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2482, pruned_loss=0.0527, over 955750.63 frames. ], batch size: 33, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:26:30,544 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104532.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 12:26:37,376 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.52 vs. limit=5.0 +2023-04-27 12:26:41,374 INFO [finetune.py:976] (0/7) Epoch 19, batch 1450, loss[loss=0.1844, simple_loss=0.2702, pruned_loss=0.0493, over 4828.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2493, pruned_loss=0.05271, over 955082.28 frames. ], batch size: 49, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:26:46,134 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.618e+02 1.883e+02 2.290e+02 5.063e+02, threshold=3.766e+02, percent-clipped=2.0 +2023-04-27 12:27:03,306 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=104580.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:27:14,736 INFO [finetune.py:976] (0/7) Epoch 19, batch 1500, loss[loss=0.1913, simple_loss=0.2701, pruned_loss=0.05632, over 4894.00 frames. ], tot_loss[loss=0.1783, simple_loss=0.2502, pruned_loss=0.05321, over 955603.38 frames. ], batch size: 43, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:27:46,357 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. 
limit=2.0 +2023-04-27 12:27:46,864 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104646.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:27:48,602 INFO [finetune.py:976] (0/7) Epoch 19, batch 1550, loss[loss=0.2131, simple_loss=0.275, pruned_loss=0.0756, over 4885.00 frames. ], tot_loss[loss=0.1788, simple_loss=0.2509, pruned_loss=0.05335, over 956004.48 frames. ], batch size: 43, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:27:51,150 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104653.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:27:53,360 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.681e+02 1.907e+02 2.272e+02 3.950e+02, threshold=3.814e+02, percent-clipped=3.0 +2023-04-27 12:28:35,127 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=104694.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:28:43,384 INFO [finetune.py:976] (0/7) Epoch 19, batch 1600, loss[loss=0.1864, simple_loss=0.2478, pruned_loss=0.06246, over 4916.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2485, pruned_loss=0.05279, over 957985.58 frames. ], batch size: 37, lr: 3.30e-03, grad_scale: 32.0 +2023-04-27 12:28:44,681 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=104701.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:28:47,655 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=104705.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:29:06,233 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8598, 2.2299, 1.9853, 2.1593, 1.5411, 1.8657, 1.8773, 1.4924], + device='cuda:0'), covar=tensor([0.1890, 0.1157, 0.0771, 0.1185, 0.3431, 0.1086, 0.1973, 0.2452], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0304, 0.0218, 0.0281, 0.0312, 0.0259, 0.0253, 0.0266], + device='cuda:0'), out_proj_covar=tensor([1.1606e-04, 1.2078e-04, 8.6293e-05, 1.1143e-04, 1.2689e-04, 1.0287e-04, + 1.0196e-04, 1.0567e-04], device='cuda:0') +2023-04-27 12:29:27,150 INFO [finetune.py:976] (0/7) Epoch 19, batch 1650, loss[loss=0.1466, simple_loss=0.2205, pruned_loss=0.03637, over 4750.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2455, pruned_loss=0.05184, over 956532.06 frames. ], batch size: 27, lr: 3.30e-03, grad_scale: 64.0 +2023-04-27 12:29:27,364 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0 +2023-04-27 12:29:29,678 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=104753.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:29:31,436 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.063e+01 1.479e+02 1.732e+02 2.090e+02 3.270e+02, threshold=3.465e+02, percent-clipped=0.0 +2023-04-27 12:29:38,133 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0540, 1.8201, 2.3286, 2.5419, 2.1714, 2.0520, 2.1627, 2.1715], + device='cuda:0'), covar=tensor([0.4856, 0.6707, 0.6798, 0.5270, 0.5999, 0.8511, 0.8585, 0.9538], + device='cuda:0'), in_proj_covar=tensor([0.0426, 0.0409, 0.0501, 0.0504, 0.0455, 0.0483, 0.0487, 0.0494], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:30:01,072 INFO [finetune.py:976] (0/7) Epoch 19, batch 1700, loss[loss=0.1933, simple_loss=0.2584, pruned_loss=0.06411, over 4788.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.243, pruned_loss=0.05096, over 955497.29 frames. 
], batch size: 26, lr: 3.30e-03, grad_scale: 64.0 +2023-04-27 12:30:20,785 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5047, 2.5393, 1.9960, 2.2466, 2.5031, 2.2144, 3.3210, 1.7597], + device='cuda:0'), covar=tensor([0.3656, 0.2142, 0.4626, 0.3301, 0.1748, 0.2558, 0.1600, 0.4609], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0348, 0.0428, 0.0354, 0.0383, 0.0381, 0.0373, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:30:48,746 INFO [finetune.py:976] (0/7) Epoch 19, batch 1750, loss[loss=0.1267, simple_loss=0.2066, pruned_loss=0.02339, over 4757.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2456, pruned_loss=0.05218, over 955461.21 frames. ], batch size: 27, lr: 3.30e-03, grad_scale: 64.0 +2023-04-27 12:30:53,006 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.623e+02 1.966e+02 2.378e+02 4.217e+02, threshold=3.932e+02, percent-clipped=2.0 +2023-04-27 12:31:37,540 INFO [finetune.py:976] (0/7) Epoch 19, batch 1800, loss[loss=0.1759, simple_loss=0.2548, pruned_loss=0.0485, over 4811.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2457, pruned_loss=0.05097, over 955123.98 frames. ], batch size: 45, lr: 3.30e-03, grad_scale: 64.0 +2023-04-27 12:32:10,885 INFO [finetune.py:976] (0/7) Epoch 19, batch 1850, loss[loss=0.1876, simple_loss=0.2523, pruned_loss=0.06151, over 4768.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2485, pruned_loss=0.05276, over 954703.31 frames. ], batch size: 26, lr: 3.30e-03, grad_scale: 64.0 +2023-04-27 12:32:15,623 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.634e+02 1.902e+02 2.257e+02 6.149e+02, threshold=3.804e+02, percent-clipped=1.0 +2023-04-27 12:32:40,030 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5720, 2.6164, 2.1459, 2.4099, 2.7669, 2.4314, 3.6053, 1.8602], + device='cuda:0'), covar=tensor([0.4085, 0.2175, 0.4401, 0.3058, 0.1886, 0.2751, 0.1236, 0.4792], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0350, 0.0431, 0.0356, 0.0384, 0.0382, 0.0375, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:32:44,648 INFO [finetune.py:976] (0/7) Epoch 19, batch 1900, loss[loss=0.2122, simple_loss=0.287, pruned_loss=0.06873, over 4818.00 frames. ], tot_loss[loss=0.1785, simple_loss=0.2501, pruned_loss=0.05349, over 955719.10 frames. ], batch size: 33, lr: 3.30e-03, grad_scale: 64.0 +2023-04-27 12:33:18,499 INFO [finetune.py:976] (0/7) Epoch 19, batch 1950, loss[loss=0.1747, simple_loss=0.2486, pruned_loss=0.05035, over 4874.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2483, pruned_loss=0.05303, over 954877.00 frames. 
], batch size: 34, lr: 3.30e-03, grad_scale: 64.0 +2023-04-27 12:33:22,765 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.554e+02 1.843e+02 2.166e+02 4.298e+02, threshold=3.686e+02, percent-clipped=1.0 +2023-04-27 12:34:07,949 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2088, 1.5294, 1.4729, 1.7562, 1.6278, 1.8876, 1.4042, 3.4693], + device='cuda:0'), covar=tensor([0.0639, 0.0772, 0.0811, 0.1228, 0.0618, 0.0528, 0.0732, 0.0137], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 12:34:18,466 INFO [finetune.py:976] (0/7) Epoch 19, batch 2000, loss[loss=0.1341, simple_loss=0.206, pruned_loss=0.03106, over 4763.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2455, pruned_loss=0.05188, over 954715.58 frames. ], batch size: 27, lr: 3.29e-03, grad_scale: 64.0 +2023-04-27 12:35:09,343 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1471, 1.4879, 1.4403, 1.6929, 1.5960, 1.7222, 1.4017, 3.0012], + device='cuda:0'), covar=tensor([0.0643, 0.0730, 0.0695, 0.1135, 0.0580, 0.0476, 0.0718, 0.0193], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 12:35:13,435 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6590, 2.4284, 2.6791, 3.2969, 2.9820, 2.5870, 2.1039, 2.7807], + device='cuda:0'), covar=tensor([0.0818, 0.0904, 0.0575, 0.0484, 0.0605, 0.0762, 0.0730, 0.0530], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0197, 0.0180, 0.0170, 0.0176, 0.0179, 0.0150, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:35:14,516 INFO [finetune.py:976] (0/7) Epoch 19, batch 2050, loss[loss=0.1856, simple_loss=0.2463, pruned_loss=0.06248, over 4914.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.243, pruned_loss=0.05136, over 956796.36 frames. ], batch size: 37, lr: 3.29e-03, grad_scale: 64.0 +2023-04-27 12:35:15,839 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6110, 1.4741, 4.3055, 4.0301, 3.7509, 4.1682, 4.0583, 3.8052], + device='cuda:0'), covar=tensor([0.6949, 0.5511, 0.1089, 0.1796, 0.1176, 0.1383, 0.1207, 0.1505], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0306, 0.0406, 0.0407, 0.0350, 0.0408, 0.0315, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:35:18,783 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.655e+01 1.540e+02 1.837e+02 2.232e+02 4.472e+02, threshold=3.673e+02, percent-clipped=5.0 +2023-04-27 12:35:52,489 INFO [finetune.py:976] (0/7) Epoch 19, batch 2100, loss[loss=0.1977, simple_loss=0.2573, pruned_loss=0.06907, over 4856.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2423, pruned_loss=0.05118, over 956601.31 frames. ], batch size: 44, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:36:01,339 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0 +2023-04-27 12:36:37,771 INFO [finetune.py:976] (0/7) Epoch 19, batch 2150, loss[loss=0.1925, simple_loss=0.2674, pruned_loss=0.05874, over 4903.00 frames. 
], tot_loss[loss=0.1751, simple_loss=0.2456, pruned_loss=0.05229, over 956093.64 frames. ], batch size: 37, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:36:48,985 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.234e+01 1.652e+02 1.972e+02 2.377e+02 5.881e+02, threshold=3.945e+02, percent-clipped=2.0 +2023-04-27 12:37:36,678 INFO [finetune.py:976] (0/7) Epoch 19, batch 2200, loss[loss=0.1922, simple_loss=0.2621, pruned_loss=0.06118, over 4796.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.2481, pruned_loss=0.05348, over 955682.97 frames. ], batch size: 29, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:38:29,805 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.07 vs. limit=5.0 +2023-04-27 12:38:49,116 INFO [finetune.py:976] (0/7) Epoch 19, batch 2250, loss[loss=0.1859, simple_loss=0.2575, pruned_loss=0.05719, over 4929.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2491, pruned_loss=0.0535, over 955013.98 frames. ], batch size: 33, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:38:59,481 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.635e+02 1.938e+02 2.374e+02 3.739e+02, threshold=3.876e+02, percent-clipped=0.0 +2023-04-27 12:39:34,811 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105388.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:39:53,432 INFO [finetune.py:976] (0/7) Epoch 19, batch 2300, loss[loss=0.2098, simple_loss=0.2822, pruned_loss=0.06868, over 4824.00 frames. ], tot_loss[loss=0.1794, simple_loss=0.2505, pruned_loss=0.05413, over 955259.95 frames. ], batch size: 47, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:40:36,455 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-27 12:40:36,898 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9725, 2.3995, 0.9346, 1.2668, 1.7734, 1.1803, 2.9636, 1.5094], + device='cuda:0'), covar=tensor([0.0652, 0.0605, 0.0724, 0.1226, 0.0485, 0.1017, 0.0280, 0.0654], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 12:40:58,676 INFO [finetune.py:976] (0/7) Epoch 19, batch 2350, loss[loss=0.1478, simple_loss=0.2213, pruned_loss=0.03708, over 4769.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2483, pruned_loss=0.05384, over 955375.84 frames. 
], batch size: 27, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:40:58,816 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105449.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 12:41:08,131 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2542, 2.9724, 1.0329, 1.6838, 1.7494, 2.1449, 1.8810, 1.0165], + device='cuda:0'), covar=tensor([0.1404, 0.0946, 0.1749, 0.1188, 0.1035, 0.0960, 0.1357, 0.1756], + device='cuda:0'), in_proj_covar=tensor([0.0115, 0.0240, 0.0136, 0.0119, 0.0130, 0.0151, 0.0115, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:41:09,884 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.591e+02 1.880e+02 2.234e+02 4.098e+02, threshold=3.760e+02, percent-clipped=1.0 +2023-04-27 12:41:43,026 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7287, 1.9689, 5.6129, 5.2839, 4.9403, 5.3753, 5.1022, 5.1792], + device='cuda:0'), covar=tensor([0.5654, 0.5642, 0.0950, 0.1862, 0.1183, 0.1560, 0.0744, 0.1147], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0305, 0.0406, 0.0406, 0.0349, 0.0406, 0.0314, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:41:52,228 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105486.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:42:04,809 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4121, 4.7904, 1.5859, 2.7667, 3.1752, 3.4011, 3.1710, 1.5229], + device='cuda:0'), covar=tensor([0.1220, 0.0833, 0.1815, 0.1009, 0.0826, 0.0885, 0.1348, 0.1816], + device='cuda:0'), in_proj_covar=tensor([0.0115, 0.0239, 0.0136, 0.0119, 0.0130, 0.0150, 0.0115, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:42:06,554 INFO [finetune.py:976] (0/7) Epoch 19, batch 2400, loss[loss=0.2191, simple_loss=0.2701, pruned_loss=0.08408, over 4798.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2447, pruned_loss=0.05222, over 955633.31 frames. ], batch size: 51, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:42:08,482 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105502.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:42:22,672 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-27 12:42:39,421 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105547.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:42:40,534 INFO [finetune.py:976] (0/7) Epoch 19, batch 2450, loss[loss=0.1738, simple_loss=0.2366, pruned_loss=0.05548, over 4860.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2418, pruned_loss=0.0512, over 957574.55 frames. ], batch size: 34, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:42:45,849 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.599e+02 1.963e+02 2.373e+02 4.323e+02, threshold=3.926e+02, percent-clipped=3.0 +2023-04-27 12:42:50,131 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105563.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:43:14,478 INFO [finetune.py:976] (0/7) Epoch 19, batch 2500, loss[loss=0.2017, simple_loss=0.2734, pruned_loss=0.06504, over 4910.00 frames. 
], tot_loss[loss=0.1737, simple_loss=0.2435, pruned_loss=0.0519, over 957690.32 frames. ], batch size: 36, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:43:46,221 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9635, 1.3766, 5.1120, 4.8503, 4.4680, 4.9384, 4.5749, 4.5867], + device='cuda:0'), covar=tensor([0.6500, 0.6214, 0.1073, 0.1606, 0.0993, 0.1426, 0.1317, 0.1651], + device='cuda:0'), in_proj_covar=tensor([0.0305, 0.0304, 0.0404, 0.0405, 0.0349, 0.0406, 0.0313, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:43:47,976 INFO [finetune.py:976] (0/7) Epoch 19, batch 2550, loss[loss=0.1652, simple_loss=0.2385, pruned_loss=0.04597, over 4756.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2473, pruned_loss=0.05282, over 956347.94 frames. ], batch size: 27, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:43:53,308 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.536e+02 1.856e+02 2.246e+02 3.753e+02, threshold=3.711e+02, percent-clipped=0.0 +2023-04-27 12:44:27,713 INFO [finetune.py:976] (0/7) Epoch 19, batch 2600, loss[loss=0.1736, simple_loss=0.2422, pruned_loss=0.05248, over 4718.00 frames. ], tot_loss[loss=0.1791, simple_loss=0.25, pruned_loss=0.05412, over 957035.61 frames. ], batch size: 23, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:44:58,136 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105744.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 12:44:58,823 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4954, 1.9611, 2.3638, 3.0147, 2.4086, 1.8513, 1.8409, 2.2947], + device='cuda:0'), covar=tensor([0.3249, 0.3330, 0.1611, 0.2489, 0.2617, 0.2705, 0.4009, 0.1966], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0245, 0.0226, 0.0314, 0.0217, 0.0231, 0.0226, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 12:45:01,137 INFO [finetune.py:976] (0/7) Epoch 19, batch 2650, loss[loss=0.1993, simple_loss=0.2698, pruned_loss=0.06436, over 4820.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2505, pruned_loss=0.0538, over 955044.15 frames. ], batch size: 38, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:45:06,400 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.583e+02 1.907e+02 2.231e+02 5.599e+02, threshold=3.814e+02, percent-clipped=2.0 +2023-04-27 12:45:26,482 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3150, 1.7465, 2.1996, 2.6285, 2.1838, 1.7342, 1.5046, 2.0415], + device='cuda:0'), covar=tensor([0.3256, 0.3082, 0.1636, 0.2303, 0.2552, 0.2731, 0.4113, 0.1911], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0245, 0.0227, 0.0315, 0.0218, 0.0231, 0.0227, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 12:45:30,662 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105792.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:45:34,813 INFO [finetune.py:976] (0/7) Epoch 19, batch 2700, loss[loss=0.2027, simple_loss=0.27, pruned_loss=0.06772, over 4821.00 frames. ], tot_loss[loss=0.178, simple_loss=0.2491, pruned_loss=0.05343, over 955358.83 frames. 
], batch size: 39, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:45:34,917 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=105799.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:45:44,466 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2294, 1.5403, 1.4217, 1.7657, 1.6760, 1.8744, 1.4585, 3.5667], + device='cuda:0'), covar=tensor([0.0631, 0.0770, 0.0844, 0.1164, 0.0615, 0.0519, 0.0748, 0.0133], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 12:46:05,113 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-27 12:46:05,632 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0 +2023-04-27 12:46:36,273 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105842.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:46:40,484 INFO [finetune.py:976] (0/7) Epoch 19, batch 2750, loss[loss=0.1833, simple_loss=0.2514, pruned_loss=0.0576, over 3944.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2465, pruned_loss=0.05269, over 952863.59 frames. ], batch size: 17, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:46:43,048 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105853.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:46:45,896 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.998e+01 1.495e+02 1.843e+02 2.455e+02 4.470e+02, threshold=3.686e+02, percent-clipped=1.0 +2023-04-27 12:46:46,566 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=105858.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:46:47,876 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=105860.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:47:27,505 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3696, 3.3742, 0.9203, 1.6938, 1.7769, 2.3542, 2.0011, 0.9611], + device='cuda:0'), covar=tensor([0.1401, 0.0766, 0.1983, 0.1277, 0.1150, 0.1011, 0.1455, 0.2105], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0241, 0.0137, 0.0120, 0.0131, 0.0152, 0.0115, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:47:35,994 INFO [finetune.py:976] (0/7) Epoch 19, batch 2800, loss[loss=0.1834, simple_loss=0.2474, pruned_loss=0.05968, over 4809.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2436, pruned_loss=0.05183, over 953775.25 frames. ], batch size: 51, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:48:24,127 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4036, 3.3563, 1.0584, 1.6900, 1.8511, 2.2338, 1.8927, 0.9759], + device='cuda:0'), covar=tensor([0.1556, 0.1240, 0.1914, 0.1415, 0.1178, 0.1160, 0.1703, 0.2009], + device='cuda:0'), in_proj_covar=tensor([0.0115, 0.0240, 0.0136, 0.0119, 0.0131, 0.0151, 0.0115, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:48:43,336 INFO [finetune.py:976] (0/7) Epoch 19, batch 2850, loss[loss=0.1865, simple_loss=0.2378, pruned_loss=0.06767, over 4010.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2427, pruned_loss=0.05172, over 953469.61 frames. 
], batch size: 65, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:48:46,013 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-04-27 12:48:53,299 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8757, 2.1453, 2.0298, 2.1772, 2.0309, 2.1238, 2.1425, 2.0137], + device='cuda:0'), covar=tensor([0.3707, 0.6220, 0.5146, 0.4682, 0.5522, 0.6731, 0.6253, 0.5769], + device='cuda:0'), in_proj_covar=tensor([0.0334, 0.0370, 0.0318, 0.0332, 0.0343, 0.0393, 0.0355, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 12:48:53,755 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.431e+02 1.804e+02 2.150e+02 4.441e+02, threshold=3.609e+02, percent-clipped=4.0 +2023-04-27 12:49:49,881 INFO [finetune.py:976] (0/7) Epoch 19, batch 2900, loss[loss=0.2207, simple_loss=0.2868, pruned_loss=0.07734, over 4895.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2443, pruned_loss=0.05228, over 953626.36 frames. ], batch size: 35, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:49:50,652 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-106000.pt +2023-04-27 12:50:40,773 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106044.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:50:43,675 INFO [finetune.py:976] (0/7) Epoch 19, batch 2950, loss[loss=0.1648, simple_loss=0.2448, pruned_loss=0.04241, over 4874.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2478, pruned_loss=0.05269, over 954417.91 frames. ], batch size: 34, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:50:43,943 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-27 12:50:48,563 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.617e+02 1.806e+02 2.204e+02 5.278e+02, threshold=3.611e+02, percent-clipped=1.0 +2023-04-27 12:50:57,620 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1038, 2.7757, 0.9939, 1.4035, 2.0437, 1.2745, 3.5204, 1.8217], + device='cuda:0'), covar=tensor([0.0710, 0.0587, 0.0801, 0.1310, 0.0494, 0.1015, 0.0194, 0.0632], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0051, 0.0052, 0.0074, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 12:51:01,774 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106077.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:51:07,079 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9882, 2.5911, 2.0620, 1.9357, 1.4757, 1.4790, 2.0891, 1.4008], + device='cuda:0'), covar=tensor([0.1728, 0.1403, 0.1348, 0.1623, 0.2328, 0.1915, 0.0931, 0.2057], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0210, 0.0168, 0.0203, 0.0198, 0.0183, 0.0155, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 12:51:12,300 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=106092.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:51:17,511 INFO [finetune.py:976] (0/7) Epoch 19, batch 3000, loss[loss=0.1656, simple_loss=0.2404, pruned_loss=0.04539, over 4819.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2492, pruned_loss=0.05274, over 956384.42 frames. 
], batch size: 38, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:51:17,512 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 12:51:21,286 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1974, 1.6256, 1.9853, 2.2715, 2.0156, 1.6157, 1.1849, 1.7156], + device='cuda:0'), covar=tensor([0.3312, 0.3332, 0.1847, 0.2264, 0.2602, 0.2682, 0.4475, 0.2227], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0245, 0.0227, 0.0315, 0.0218, 0.0231, 0.0227, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 12:51:33,615 INFO [finetune.py:1010] (0/7) Epoch 19, validation: loss=0.1523, simple_loss=0.2226, pruned_loss=0.04099, over 2265189.00 frames. +2023-04-27 12:51:33,615 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 12:51:34,952 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3683, 1.5091, 3.8929, 3.6554, 3.4826, 3.7245, 3.7918, 3.4938], + device='cuda:0'), covar=tensor([0.6848, 0.5222, 0.1251, 0.1807, 0.1205, 0.1589, 0.1148, 0.1551], + device='cuda:0'), in_proj_covar=tensor([0.0303, 0.0301, 0.0400, 0.0402, 0.0346, 0.0401, 0.0309, 0.0362], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 12:51:42,984 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-27 12:52:18,478 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106138.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:52:26,608 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106142.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:52:36,638 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106148.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:52:37,151 INFO [finetune.py:976] (0/7) Epoch 19, batch 3050, loss[loss=0.1668, simple_loss=0.2389, pruned_loss=0.0474, over 4749.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2489, pruned_loss=0.05256, over 955721.73 frames. ], batch size: 27, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:52:46,786 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106155.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:52:48,482 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.467e+02 1.745e+02 2.097e+02 4.231e+02, threshold=3.490e+02, percent-clipped=3.0 +2023-04-27 12:52:49,218 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106158.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:53:31,156 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=106190.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:53:42,133 INFO [finetune.py:976] (0/7) Epoch 19, batch 3100, loss[loss=0.1545, simple_loss=0.2238, pruned_loss=0.04258, over 4867.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2456, pruned_loss=0.05141, over 954475.97 frames. ], batch size: 34, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:53:51,765 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=106206.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:53:51,958 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.92 vs. limit=5.0 +2023-04-27 12:54:05,953 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. 
limit=5.0 +2023-04-27 12:54:35,398 INFO [finetune.py:976] (0/7) Epoch 19, batch 3150, loss[loss=0.1614, simple_loss=0.2243, pruned_loss=0.04926, over 4814.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2436, pruned_loss=0.05171, over 952308.14 frames. ], batch size: 25, lr: 3.29e-03, grad_scale: 32.0 +2023-04-27 12:54:35,548 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2093, 1.3406, 1.6514, 1.7799, 1.7309, 1.8082, 1.6795, 1.7027], + device='cuda:0'), covar=tensor([0.3456, 0.5096, 0.3959, 0.4036, 0.4948, 0.6436, 0.4795, 0.4347], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0374, 0.0321, 0.0335, 0.0345, 0.0396, 0.0359, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 12:54:40,720 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.609e+02 1.934e+02 2.307e+02 5.875e+02, threshold=3.868e+02, percent-clipped=2.0 +2023-04-27 12:54:49,433 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106268.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:55:23,648 INFO [finetune.py:976] (0/7) Epoch 19, batch 3200, loss[loss=0.1681, simple_loss=0.2303, pruned_loss=0.05294, over 4714.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2409, pruned_loss=0.05094, over 954903.28 frames. ], batch size: 59, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 12:55:26,271 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-27 12:55:50,417 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106329.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:55:54,729 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106336.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:56:03,600 INFO [finetune.py:976] (0/7) Epoch 19, batch 3250, loss[loss=0.1337, simple_loss=0.1935, pruned_loss=0.03702, over 4183.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2412, pruned_loss=0.0513, over 953873.67 frames. ], batch size: 18, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 12:56:06,822 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9949, 2.6602, 2.0731, 2.1545, 1.4465, 1.4466, 2.1436, 1.4163], + device='cuda:0'), covar=tensor([0.1684, 0.1422, 0.1356, 0.1567, 0.2264, 0.1912, 0.0964, 0.1997], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0212, 0.0168, 0.0203, 0.0198, 0.0184, 0.0155, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 12:56:08,525 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.638e+02 1.854e+02 2.216e+02 4.860e+02, threshold=3.708e+02, percent-clipped=2.0 +2023-04-27 12:56:25,528 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.01 vs. limit=5.0 +2023-04-27 12:56:35,775 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106397.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:56:37,427 INFO [finetune.py:976] (0/7) Epoch 19, batch 3300, loss[loss=0.1836, simple_loss=0.2659, pruned_loss=0.05071, over 4851.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2463, pruned_loss=0.05253, over 954568.64 frames. 
], batch size: 44, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 12:56:47,322 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5725, 3.6660, 0.8446, 1.8690, 1.8961, 2.4783, 2.0475, 0.9195], + device='cuda:0'), covar=tensor([0.1429, 0.0859, 0.2106, 0.1321, 0.1111, 0.1068, 0.1634, 0.2148], + device='cuda:0'), in_proj_covar=tensor([0.0115, 0.0239, 0.0136, 0.0118, 0.0130, 0.0150, 0.0114, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 12:57:00,446 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106433.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:57:02,927 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7704, 2.2258, 1.9524, 1.5111, 1.3186, 1.3444, 1.9328, 1.2845], + device='cuda:0'), covar=tensor([0.1908, 0.1448, 0.1471, 0.2026, 0.2531, 0.2312, 0.1078, 0.2277], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0212, 0.0168, 0.0204, 0.0199, 0.0185, 0.0155, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 12:57:09,670 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106448.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:57:10,217 INFO [finetune.py:976] (0/7) Epoch 19, batch 3350, loss[loss=0.1993, simple_loss=0.2736, pruned_loss=0.06254, over 4903.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2481, pruned_loss=0.05277, over 954124.50 frames. ], batch size: 36, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 12:57:14,353 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=106454.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:57:14,938 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106455.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:57:16,072 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.680e+02 1.880e+02 2.311e+02 4.481e+02, threshold=3.759e+02, percent-clipped=2.0 +2023-04-27 12:57:42,142 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=106496.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:57:43,907 INFO [finetune.py:976] (0/7) Epoch 19, batch 3400, loss[loss=0.1862, simple_loss=0.2607, pruned_loss=0.05586, over 4817.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2486, pruned_loss=0.05291, over 954171.54 frames. ], batch size: 38, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 12:57:45,978 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. 
limit=2.0 +2023-04-27 12:57:46,914 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=106503.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:57:54,769 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=106515.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:57:58,336 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1177, 2.7119, 1.0153, 1.4108, 2.1039, 1.3118, 3.4087, 1.6491], + device='cuda:0'), covar=tensor([0.0662, 0.0690, 0.0811, 0.1132, 0.0463, 0.0922, 0.0202, 0.0640], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 12:58:10,554 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6526, 2.1982, 0.9924, 1.4150, 2.1899, 1.5656, 1.4798, 1.5697], + device='cuda:0'), covar=tensor([0.0523, 0.0335, 0.0339, 0.0573, 0.0255, 0.0521, 0.0511, 0.0581], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0052, 0.0038, 0.0049, 0.0049, 0.0051], + device='cuda:0') +2023-04-27 12:58:15,442 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2555, 2.8277, 1.0845, 1.4931, 2.3909, 1.3919, 3.7953, 1.9307], + device='cuda:0'), covar=tensor([0.0635, 0.0590, 0.0797, 0.1339, 0.0446, 0.0991, 0.0281, 0.0621], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 12:58:17,776 INFO [finetune.py:976] (0/7) Epoch 19, batch 3450, loss[loss=0.1783, simple_loss=0.2436, pruned_loss=0.05651, over 4897.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2476, pruned_loss=0.05238, over 954053.28 frames. ], batch size: 32, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 12:58:23,137 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.592e+02 1.859e+02 2.190e+02 3.296e+02, threshold=3.717e+02, percent-clipped=0.0 +2023-04-27 12:58:57,664 INFO [finetune.py:976] (0/7) Epoch 19, batch 3500, loss[loss=0.2129, simple_loss=0.261, pruned_loss=0.08238, over 4825.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2456, pruned_loss=0.0522, over 954763.80 frames. ], batch size: 25, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 12:59:14,544 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106624.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 12:59:36,355 INFO [finetune.py:976] (0/7) Epoch 19, batch 3550, loss[loss=0.1632, simple_loss=0.2291, pruned_loss=0.04866, over 4831.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2428, pruned_loss=0.05128, over 955001.67 frames. 
], batch size: 33, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 12:59:41,156 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.386e+01 1.482e+02 1.819e+02 2.170e+02 5.506e+02, threshold=3.638e+02, percent-clipped=1.0 +2023-04-27 12:59:46,048 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7347, 2.0665, 1.7821, 2.0088, 1.6142, 1.7899, 1.7739, 1.2976], + device='cuda:0'), covar=tensor([0.1571, 0.1397, 0.0861, 0.1066, 0.3401, 0.1237, 0.1678, 0.2377], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0301, 0.0215, 0.0276, 0.0310, 0.0258, 0.0249, 0.0264], + device='cuda:0'), out_proj_covar=tensor([1.1420e-04, 1.1934e-04, 8.5199e-05, 1.0959e-04, 1.2578e-04, 1.0200e-04, + 1.0051e-04, 1.0454e-04], device='cuda:0') +2023-04-27 13:00:05,459 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106692.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:00:14,738 INFO [finetune.py:976] (0/7) Epoch 19, batch 3600, loss[loss=0.23, simple_loss=0.2913, pruned_loss=0.08436, over 4861.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2413, pruned_loss=0.05098, over 954527.93 frames. ], batch size: 44, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:00:27,516 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-27 13:00:58,224 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106733.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:01:14,588 INFO [finetune.py:976] (0/7) Epoch 19, batch 3650, loss[loss=0.2318, simple_loss=0.3054, pruned_loss=0.07905, over 4906.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2454, pruned_loss=0.05238, over 956214.80 frames. ], batch size: 35, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:01:19,419 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.799e+01 1.618e+02 2.033e+02 2.562e+02 4.341e+02, threshold=4.066e+02, percent-clipped=3.0 +2023-04-27 13:01:35,794 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=106781.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:01:41,091 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4919, 1.7134, 1.6139, 2.1097, 2.0901, 1.9418, 1.7090, 4.5674], + device='cuda:0'), covar=tensor([0.0589, 0.0884, 0.0904, 0.1203, 0.0658, 0.0622, 0.0806, 0.0153], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0039, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 13:01:48,135 INFO [finetune.py:976] (0/7) Epoch 19, batch 3700, loss[loss=0.203, simple_loss=0.2762, pruned_loss=0.06486, over 4926.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2486, pruned_loss=0.05293, over 957182.19 frames. 
], batch size: 38, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:01:55,079 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=106810.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:02:14,807 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5531, 1.8319, 1.8401, 1.9838, 1.8562, 1.9247, 1.9536, 1.9182], + device='cuda:0'), covar=tensor([0.4139, 0.6051, 0.5150, 0.4841, 0.6044, 0.7585, 0.5779, 0.5565], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0374, 0.0321, 0.0335, 0.0345, 0.0395, 0.0359, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 13:02:22,251 INFO [finetune.py:976] (0/7) Epoch 19, batch 3750, loss[loss=0.102, simple_loss=0.1667, pruned_loss=0.01865, over 4184.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2488, pruned_loss=0.05258, over 954348.16 frames. ], batch size: 17, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:02:24,966 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-27 13:02:27,098 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.601e+01 1.567e+02 1.830e+02 2.266e+02 4.856e+02, threshold=3.659e+02, percent-clipped=1.0 +2023-04-27 13:02:31,476 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9744, 2.3149, 1.2646, 1.7090, 2.1298, 1.8986, 1.7648, 1.9226], + device='cuda:0'), covar=tensor([0.0482, 0.0308, 0.0321, 0.0516, 0.0243, 0.0484, 0.0493, 0.0514], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 13:02:55,379 INFO [finetune.py:976] (0/7) Epoch 19, batch 3800, loss[loss=0.183, simple_loss=0.2396, pruned_loss=0.06323, over 3992.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.2495, pruned_loss=0.05306, over 953481.44 frames. ], batch size: 17, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:03:05,888 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4097, 1.1719, 1.3223, 1.6551, 1.6527, 1.3850, 0.9647, 1.5043], + device='cuda:0'), covar=tensor([0.0699, 0.1372, 0.0836, 0.0519, 0.0592, 0.0718, 0.0759, 0.0537], + device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0200, 0.0181, 0.0171, 0.0174, 0.0181, 0.0150, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 13:03:10,673 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106924.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:03:26,555 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0620, 2.1431, 1.8523, 1.7807, 2.1596, 1.8266, 2.6933, 1.6257], + device='cuda:0'), covar=tensor([0.3533, 0.1677, 0.4540, 0.2856, 0.1679, 0.2342, 0.1163, 0.4295], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0343, 0.0426, 0.0352, 0.0380, 0.0376, 0.0371, 0.0415], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 13:03:27,657 INFO [finetune.py:976] (0/7) Epoch 19, batch 3850, loss[loss=0.1463, simple_loss=0.2172, pruned_loss=0.03773, over 4813.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2478, pruned_loss=0.05222, over 954620.26 frames. 
], batch size: 40, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:03:33,087 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.149e+02 1.551e+02 1.789e+02 2.154e+02 4.176e+02, threshold=3.578e+02, percent-clipped=2.0 +2023-04-27 13:03:35,637 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0213, 2.3008, 0.9254, 1.2364, 1.7758, 1.2023, 2.5213, 1.4615], + device='cuda:0'), covar=tensor([0.0650, 0.0587, 0.0587, 0.1208, 0.0391, 0.0945, 0.0296, 0.0681], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 13:03:42,312 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=106972.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:03:56,024 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=106992.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:04:00,618 INFO [finetune.py:976] (0/7) Epoch 19, batch 3900, loss[loss=0.2074, simple_loss=0.2668, pruned_loss=0.07394, over 4901.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.244, pruned_loss=0.05093, over 954288.77 frames. ], batch size: 32, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:04:33,280 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=107040.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:04:41,523 INFO [finetune.py:976] (0/7) Epoch 19, batch 3950, loss[loss=0.1567, simple_loss=0.2323, pruned_loss=0.04054, over 4856.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2415, pruned_loss=0.0506, over 956835.59 frames. ], batch size: 44, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:04:54,081 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.559e+01 1.455e+02 1.767e+02 2.125e+02 3.704e+02, threshold=3.534e+02, percent-clipped=1.0 +2023-04-27 13:05:26,563 INFO [finetune.py:976] (0/7) Epoch 19, batch 4000, loss[loss=0.1756, simple_loss=0.2479, pruned_loss=0.0517, over 4917.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2427, pruned_loss=0.05158, over 955364.79 frames. ], batch size: 37, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:05:35,400 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107110.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:06:07,233 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7441, 2.3383, 1.8764, 1.7801, 1.2843, 1.2858, 1.8990, 1.2306], + device='cuda:0'), covar=tensor([0.1716, 0.1463, 0.1396, 0.1748, 0.2276, 0.1991, 0.0998, 0.2059], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0212, 0.0169, 0.0204, 0.0199, 0.0185, 0.0156, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 13:06:15,776 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107140.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 13:06:28,916 INFO [finetune.py:976] (0/7) Epoch 19, batch 4050, loss[loss=0.1657, simple_loss=0.2355, pruned_loss=0.04799, over 4760.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2464, pruned_loss=0.05273, over 955055.23 frames. 
], batch size: 28, lr: 3.28e-03, grad_scale: 32.0 +2023-04-27 13:06:30,823 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107152.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:06:35,807 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.479e+01 1.702e+02 1.942e+02 2.364e+02 4.795e+02, threshold=3.885e+02, percent-clipped=4.0 +2023-04-27 13:06:36,470 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=107158.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:06:51,948 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-27 13:07:15,574 INFO [finetune.py:976] (0/7) Epoch 19, batch 4100, loss[loss=0.1464, simple_loss=0.2115, pruned_loss=0.04063, over 3797.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2479, pruned_loss=0.05297, over 952550.27 frames. ], batch size: 16, lr: 3.28e-03, grad_scale: 64.0 +2023-04-27 13:07:16,435 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8739, 2.4535, 1.1365, 1.6662, 2.5054, 1.8802, 1.6734, 1.8438], + device='cuda:0'), covar=tensor([0.0503, 0.0316, 0.0299, 0.0529, 0.0217, 0.0504, 0.0500, 0.0554], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050], + device='cuda:0') +2023-04-27 13:07:17,391 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107201.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 13:07:26,143 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107213.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 13:07:48,308 INFO [finetune.py:976] (0/7) Epoch 19, batch 4150, loss[loss=0.2051, simple_loss=0.2641, pruned_loss=0.07306, over 4799.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2481, pruned_loss=0.05295, over 953338.74 frames. ], batch size: 25, lr: 3.28e-03, grad_scale: 64.0 +2023-04-27 13:07:54,579 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.951e+01 1.595e+02 1.845e+02 2.201e+02 3.820e+02, threshold=3.690e+02, percent-clipped=0.0 +2023-04-27 13:08:11,123 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4930, 3.1179, 2.5174, 2.9367, 2.2027, 2.5568, 2.7214, 1.8921], + device='cuda:0'), covar=tensor([0.1873, 0.1063, 0.0718, 0.1135, 0.2854, 0.1088, 0.1788, 0.2709], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0304, 0.0217, 0.0278, 0.0312, 0.0260, 0.0250, 0.0265], + device='cuda:0'), out_proj_covar=tensor([1.1508e-04, 1.2067e-04, 8.5850e-05, 1.1035e-04, 1.2678e-04, 1.0288e-04, + 1.0100e-04, 1.0490e-04], device='cuda:0') +2023-04-27 13:08:22,023 INFO [finetune.py:976] (0/7) Epoch 19, batch 4200, loss[loss=0.2299, simple_loss=0.2909, pruned_loss=0.08444, over 4146.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2487, pruned_loss=0.05258, over 952930.48 frames. 
], batch size: 65, lr: 3.28e-03, grad_scale: 64.0
+2023-04-27 13:08:32,094 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3981, 2.9981, 0.9051, 1.5592, 2.2424, 1.4983, 4.1090, 2.1652],
+ device='cuda:0'), covar=tensor([0.0651, 0.0760, 0.0834, 0.1199, 0.0507, 0.0950, 0.0205, 0.0559],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 13:08:42,652 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8210, 1.0605, 1.7580, 2.2320, 1.9008, 1.7451, 1.7588, 1.7566],
+ device='cuda:0'), covar=tensor([0.4653, 0.6588, 0.6307, 0.6032, 0.5740, 0.7800, 0.8022, 0.7883],
+ device='cuda:0'), in_proj_covar=tensor([0.0428, 0.0411, 0.0505, 0.0507, 0.0457, 0.0483, 0.0489, 0.0496],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 13:08:55,957 INFO [finetune.py:976] (0/7) Epoch 19, batch 4250, loss[loss=0.1778, simple_loss=0.2531, pruned_loss=0.05129, over 4808.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2475, pruned_loss=0.05237, over 953578.36 frames. ], batch size: 51, lr: 3.28e-03, grad_scale: 64.0
+2023-04-27 13:09:01,365 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.574e+01 1.612e+02 1.786e+02 2.196e+02 3.792e+02, threshold=3.571e+02, percent-clipped=1.0
+2023-04-27 13:09:08,190 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.68 vs. limit=5.0
+2023-04-27 13:09:29,663 INFO [finetune.py:976] (0/7) Epoch 19, batch 4300, loss[loss=0.1414, simple_loss=0.2202, pruned_loss=0.03127, over 4776.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2448, pruned_loss=0.05147, over 953474.13 frames. ], batch size: 26, lr: 3.28e-03, grad_scale: 32.0
+2023-04-27 13:09:32,928 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8372, 1.8843, 1.8269, 1.5303, 1.9305, 1.6266, 2.5702, 1.5712],
+ device='cuda:0'), covar=tensor([0.3675, 0.1904, 0.4897, 0.2869, 0.1780, 0.2582, 0.1277, 0.4718],
+ device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0344, 0.0428, 0.0352, 0.0381, 0.0376, 0.0371, 0.0417],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 13:10:03,432 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1796, 1.5206, 1.4604, 1.9028, 1.8118, 1.8781, 1.4791, 4.1948],
+ device='cuda:0'), covar=tensor([0.0667, 0.1089, 0.0953, 0.1347, 0.0745, 0.0647, 0.0943, 0.0192],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 13:10:31,834 INFO [finetune.py:976] (0/7) Epoch 19, batch 4350, loss[loss=0.1588, simple_loss=0.2368, pruned_loss=0.04037, over 4923.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2414, pruned_loss=0.0502, over 952651.69 frames. ], batch size: 38, lr: 3.28e-03, grad_scale: 32.0
+2023-04-27 13:10:33,782 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1659, 2.5930, 0.7485, 1.5547, 1.6698, 1.9553, 1.6927, 0.8563],
+ device='cuda:0'), covar=tensor([0.1572, 0.1217, 0.1941, 0.1314, 0.1085, 0.0962, 0.1495, 0.1716],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0242, 0.0138, 0.0119, 0.0132, 0.0152, 0.0116, 0.0120],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 13:10:37,327 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.020e+02 1.596e+02 1.862e+02 2.295e+02 4.192e+02, threshold=3.723e+02, percent-clipped=1.0
+2023-04-27 13:11:03,025 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107496.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 13:11:04,784 INFO [finetune.py:976] (0/7) Epoch 19, batch 4400, loss[loss=0.2157, simple_loss=0.2799, pruned_loss=0.07571, over 4829.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2419, pruned_loss=0.05047, over 952615.25 frames. ], batch size: 33, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:11:04,899 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5029, 1.1850, 0.4989, 1.2011, 1.1323, 1.3945, 1.2759, 1.2696],
+ device='cuda:0'), covar=tensor([0.0537, 0.0407, 0.0409, 0.0599, 0.0300, 0.0516, 0.0536, 0.0579],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0049, 0.0049, 0.0051],
+ device='cuda:0')
+2023-04-27 13:11:10,422 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107508.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 13:11:11,674 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107510.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:11:49,534 INFO [finetune.py:976] (0/7) Epoch 19, batch 4450, loss[loss=0.1409, simple_loss=0.2052, pruned_loss=0.03834, over 4202.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2458, pruned_loss=0.05191, over 953070.59 frames. ], batch size: 18, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:12:00,085 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.632e+02 1.928e+02 2.326e+02 3.887e+02, threshold=3.856e+02, percent-clipped=1.0
+2023-04-27 13:12:10,836 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6740, 1.4038, 1.2965, 1.5312, 1.9167, 1.5921, 1.2941, 1.2281],
+ device='cuda:0'), covar=tensor([0.1398, 0.1270, 0.1666, 0.1116, 0.0793, 0.1368, 0.1678, 0.2126],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0312, 0.0349, 0.0289, 0.0328, 0.0307, 0.0299, 0.0369],
+ device='cuda:0'), out_proj_covar=tensor([6.3505e-05, 6.4722e-05, 7.3920e-05, 5.8562e-05, 6.8077e-05, 6.4583e-05,
+ 6.2626e-05, 7.8682e-05], device='cuda:0')
+2023-04-27 13:12:12,127 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-27 13:12:18,624 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107571.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:12:43,863 INFO [finetune.py:976] (0/7) Epoch 19, batch 4500, loss[loss=0.1618, simple_loss=0.2265, pruned_loss=0.0486, over 4901.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2474, pruned_loss=0.05197, over 952445.90 frames. ], batch size: 32, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:13:17,593 INFO [finetune.py:976] (0/7) Epoch 19, batch 4550, loss[loss=0.1426, simple_loss=0.2258, pruned_loss=0.02976, over 4888.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2479, pruned_loss=0.05194, over 950805.82 frames. ], batch size: 32, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:13:23,029 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.130e+02 1.628e+02 1.814e+02 2.190e+02 5.311e+02, threshold=3.629e+02, percent-clipped=2.0
+2023-04-27 13:13:23,750 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107659.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:13:50,228 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0
+2023-04-27 13:13:51,122 INFO [finetune.py:976] (0/7) Epoch 19, batch 4600, loss[loss=0.1594, simple_loss=0.2326, pruned_loss=0.04308, over 4758.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.247, pruned_loss=0.05184, over 952391.56 frames. ], batch size: 26, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:13:54,955 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3095, 1.5138, 1.7619, 1.8615, 1.8000, 1.8494, 1.8502, 1.8233],
+ device='cuda:0'), covar=tensor([0.3792, 0.4965, 0.4029, 0.4106, 0.5168, 0.6889, 0.5167, 0.4363],
+ device='cuda:0'), in_proj_covar=tensor([0.0335, 0.0371, 0.0320, 0.0334, 0.0345, 0.0393, 0.0357, 0.0327],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:14:04,034 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107720.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:14:16,202 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=107737.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:14:24,297 INFO [finetune.py:976] (0/7) Epoch 19, batch 4650, loss[loss=0.1777, simple_loss=0.2464, pruned_loss=0.05452, over 4810.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2446, pruned_loss=0.05109, over 951996.72 frames. ], batch size: 45, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:14:29,768 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.588e+02 1.876e+02 2.187e+02 4.173e+02, threshold=3.752e+02, percent-clipped=2.0
+2023-04-27 13:15:06,650 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107796.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 13:15:07,909 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=107798.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 13:15:08,399 INFO [finetune.py:976] (0/7) Epoch 19, batch 4700, loss[loss=0.1938, simple_loss=0.2631, pruned_loss=0.06228, over 4773.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2418, pruned_loss=0.05074, over 952206.59 frames. ], batch size: 28, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:15:19,595 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=107808.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:15:25,139 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6143, 2.2914, 2.6529, 3.0871, 2.9764, 2.6315, 2.1581, 2.8473],
+ device='cuda:0'), covar=tensor([0.0866, 0.1096, 0.0574, 0.0531, 0.0547, 0.0808, 0.0717, 0.0473],
+ device='cuda:0'), in_proj_covar=tensor([0.0189, 0.0203, 0.0183, 0.0173, 0.0178, 0.0182, 0.0152, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 13:15:48,339 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=107844.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 13:15:52,873 INFO [finetune.py:976] (0/7) Epoch 19, batch 4750, loss[loss=0.1461, simple_loss=0.21, pruned_loss=0.04108, over 4298.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.24, pruned_loss=0.05037, over 952930.84 frames. ], batch size: 18, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:15:56,548 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0076, 2.4687, 2.0445, 2.3522, 1.6549, 1.9944, 1.9750, 1.5378],
+ device='cuda:0'), covar=tensor([0.1900, 0.1197, 0.0711, 0.0978, 0.3295, 0.1094, 0.1758, 0.2530],
+ device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0304, 0.0217, 0.0280, 0.0314, 0.0261, 0.0252, 0.0267],
+ device='cuda:0'), out_proj_covar=tensor([1.1572e-04, 1.2071e-04, 8.6151e-05, 1.1085e-04, 1.2728e-04, 1.0331e-04,
+ 1.0179e-04, 1.0586e-04], device='cuda:0')
+2023-04-27 13:15:57,093 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=107856.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:15:58,278 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.851e+01 1.548e+02 1.806e+02 2.304e+02 4.398e+02, threshold=3.612e+02, percent-clipped=1.0
+2023-04-27 13:16:09,256 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=107866.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:16:30,398 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6496, 2.0808, 1.6843, 1.9989, 1.3948, 1.6598, 1.8092, 1.3862],
+ device='cuda:0'), covar=tensor([0.2410, 0.1609, 0.1232, 0.1586, 0.3865, 0.1545, 0.2009, 0.2782],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0305, 0.0218, 0.0281, 0.0315, 0.0262, 0.0253, 0.0268],
+ device='cuda:0'), out_proj_covar=tensor([1.1610e-04, 1.2127e-04, 8.6499e-05, 1.1126e-04, 1.2769e-04, 1.0365e-04,
+ 1.0205e-04, 1.0614e-04], device='cuda:0')
+2023-04-27 13:16:53,228 INFO [finetune.py:976] (0/7) Epoch 19, batch 4800, loss[loss=0.2248, simple_loss=0.3012, pruned_loss=0.07426, over 4822.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2441, pruned_loss=0.05156, over 952661.59 frames. ], batch size: 39, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:17:31,834 INFO [finetune.py:976] (0/7) Epoch 19, batch 4850, loss[loss=0.1914, simple_loss=0.2607, pruned_loss=0.06105, over 4920.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2473, pruned_loss=0.05207, over 954533.25 frames. ], batch size: 42, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:17:37,759 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.607e+02 1.917e+02 2.218e+02 6.571e+02, threshold=3.834e+02, percent-clipped=2.0
+2023-04-27 13:18:04,088 INFO [finetune.py:976] (0/7) Epoch 19, batch 4900, loss[loss=0.1654, simple_loss=0.2473, pruned_loss=0.04175, over 4893.00 frames. ], tot_loss[loss=0.1776, simple_loss=0.2495, pruned_loss=0.05285, over 956147.86 frames. ], batch size: 43, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:18:05,343 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-108000.pt
+2023-04-27 13:18:16,701 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108015.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:18:27,263 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-27 13:18:38,326 INFO [finetune.py:976] (0/7) Epoch 19, batch 4950, loss[loss=0.1475, simple_loss=0.2239, pruned_loss=0.03552, over 4334.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2481, pruned_loss=0.05143, over 953440.64 frames. ], batch size: 65, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:18:45,329 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.581e+02 1.882e+02 2.406e+02 4.521e+02, threshold=3.765e+02, percent-clipped=4.0
+2023-04-27 13:19:02,564 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-04-27 13:19:07,750 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108093.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 13:19:11,337 INFO [finetune.py:976] (0/7) Epoch 19, batch 5000, loss[loss=0.1557, simple_loss=0.2244, pruned_loss=0.04353, over 4897.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2473, pruned_loss=0.05096, over 955017.97 frames. ], batch size: 35, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:19:40,637 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7116, 1.8675, 1.8730, 2.5210, 2.6353, 2.1422, 2.1331, 1.9240],
+ device='cuda:0'), covar=tensor([0.2177, 0.1982, 0.2143, 0.2191, 0.1973, 0.2554, 0.2377, 0.2552],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0313, 0.0351, 0.0291, 0.0329, 0.0309, 0.0300, 0.0372],
+ device='cuda:0'), out_proj_covar=tensor([6.3769e-05, 6.4910e-05, 7.4500e-05, 5.8898e-05, 6.8253e-05, 6.4812e-05,
+ 6.2923e-05, 7.9251e-05], device='cuda:0')
+2023-04-27 13:19:44,757 INFO [finetune.py:976] (0/7) Epoch 19, batch 5050, loss[loss=0.1585, simple_loss=0.233, pruned_loss=0.04197, over 4757.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2448, pruned_loss=0.051, over 955124.98 frames. ], batch size: 26, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:19:51,164 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.142e+01 1.502e+02 1.806e+02 2.114e+02 4.959e+02, threshold=3.611e+02, percent-clipped=2.0
+2023-04-27 13:19:55,857 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1273, 2.6882, 0.9866, 1.4370, 2.0567, 1.2698, 3.3653, 1.7528],
+ device='cuda:0'), covar=tensor([0.0671, 0.0641, 0.0842, 0.1118, 0.0467, 0.0899, 0.0199, 0.0580],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0073, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 13:19:57,071 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108166.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:20:29,632 INFO [finetune.py:976] (0/7) Epoch 19, batch 5100, loss[loss=0.1519, simple_loss=0.2208, pruned_loss=0.04152, over 4929.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2411, pruned_loss=0.04996, over 954105.39 frames. ], batch size: 38, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:20:50,604 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=108214.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:21:20,249 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6263, 3.1550, 1.0623, 1.7402, 2.3759, 1.6966, 4.5690, 2.4772],
+ device='cuda:0'), covar=tensor([0.0611, 0.0666, 0.0839, 0.1272, 0.0526, 0.0974, 0.0195, 0.0559],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 13:21:20,868 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8792, 2.2397, 0.8614, 1.2452, 1.4772, 1.1435, 2.4652, 1.3895],
+ device='cuda:0'), covar=tensor([0.0658, 0.0630, 0.0649, 0.1114, 0.0448, 0.0943, 0.0284, 0.0640],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 13:21:34,995 INFO [finetune.py:976] (0/7) Epoch 19, batch 5150, loss[loss=0.1728, simple_loss=0.2383, pruned_loss=0.0536, over 4724.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2415, pruned_loss=0.05042, over 953503.39 frames. ], batch size: 23, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:21:44,948 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.855e+01 1.529e+02 1.881e+02 2.347e+02 3.720e+02, threshold=3.762e+02, percent-clipped=1.0
+2023-04-27 13:22:29,161 INFO [finetune.py:976] (0/7) Epoch 19, batch 5200, loss[loss=0.1949, simple_loss=0.2879, pruned_loss=0.05096, over 4824.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.244, pruned_loss=0.05121, over 953142.69 frames. ], batch size: 39, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:22:29,884 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8751, 1.0866, 3.2488, 3.0138, 2.9233, 3.1862, 3.1491, 2.8616],
+ device='cuda:0'), covar=tensor([0.7746, 0.6043, 0.1817, 0.2647, 0.1577, 0.2227, 0.1928, 0.2079],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0302, 0.0405, 0.0405, 0.0349, 0.0406, 0.0312, 0.0363],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 13:22:51,794 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108315.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:23:05,675 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108325.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:23:28,830 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.66 vs. limit=2.0
+2023-04-27 13:23:36,574 INFO [finetune.py:976] (0/7) Epoch 19, batch 5250, loss[loss=0.1594, simple_loss=0.232, pruned_loss=0.04341, over 4749.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2459, pruned_loss=0.05143, over 951954.81 frames. ], batch size: 54, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:23:44,340 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.109e+02 1.684e+02 2.011e+02 2.395e+02 3.683e+02, threshold=4.022e+02, percent-clipped=0.0
+2023-04-27 13:23:53,055 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=108363.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:24:19,499 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108386.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:24:23,679 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108393.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:24:27,186 INFO [finetune.py:976] (0/7) Epoch 19, batch 5300, loss[loss=0.1666, simple_loss=0.2531, pruned_loss=0.04009, over 4820.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2479, pruned_loss=0.05232, over 951187.70 frames. ], batch size: 47, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:24:49,147 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0
+2023-04-27 13:24:50,802 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108432.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:24:56,168 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=108441.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:25:01,041 INFO [finetune.py:976] (0/7) Epoch 19, batch 5350, loss[loss=0.1584, simple_loss=0.2342, pruned_loss=0.04128, over 4757.00 frames. ], tot_loss[loss=0.1753, simple_loss=0.2474, pruned_loss=0.05162, over 951683.46 frames. ], batch size: 27, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:25:06,497 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.103e+02 1.659e+02 1.988e+02 2.329e+02 5.055e+02, threshold=3.977e+02, percent-clipped=1.0
+2023-04-27 13:25:31,232 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108493.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 13:25:34,731 INFO [finetune.py:976] (0/7) Epoch 19, batch 5400, loss[loss=0.1449, simple_loss=0.2206, pruned_loss=0.03462, over 4793.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2451, pruned_loss=0.05118, over 951637.32 frames. ], batch size: 45, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:25:43,407 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6968, 1.9514, 0.8622, 1.4207, 1.8267, 1.6209, 1.5072, 1.6288],
+ device='cuda:0'), covar=tensor([0.0484, 0.0340, 0.0330, 0.0541, 0.0265, 0.0487, 0.0465, 0.0562],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0051],
+ device='cuda:0')
+2023-04-27 13:26:05,795 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1576, 2.5220, 0.7882, 1.5183, 1.5655, 1.7338, 1.6137, 0.8422],
+ device='cuda:0'), covar=tensor([0.1436, 0.1139, 0.1832, 0.1247, 0.1052, 0.0989, 0.1530, 0.1650],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0243, 0.0140, 0.0120, 0.0133, 0.0153, 0.0118, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 13:26:31,589 INFO [finetune.py:976] (0/7) Epoch 19, batch 5450, loss[loss=0.1779, simple_loss=0.2432, pruned_loss=0.05633, over 4855.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2439, pruned_loss=0.05161, over 952795.07 frames. ], batch size: 44, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:26:42,381 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.852e+01 1.533e+02 1.903e+02 2.214e+02 4.592e+02, threshold=3.806e+02, percent-clipped=2.0
+2023-04-27 13:27:06,390 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108577.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:27:25,457 INFO [finetune.py:976] (0/7) Epoch 19, batch 5500, loss[loss=0.1199, simple_loss=0.1928, pruned_loss=0.02353, over 4788.00 frames. ], tot_loss[loss=0.17, simple_loss=0.24, pruned_loss=0.05002, over 954409.24 frames. ], batch size: 29, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:27:33,604 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7912, 2.1149, 0.9092, 1.5571, 2.2333, 1.7375, 1.6151, 1.7274],
+ device='cuda:0'), covar=tensor([0.0486, 0.0339, 0.0310, 0.0542, 0.0227, 0.0500, 0.0480, 0.0525],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0050, 0.0045, 0.0038, 0.0051, 0.0038, 0.0049, 0.0049, 0.0050],
+ device='cuda:0')
+2023-04-27 13:27:51,041 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108638.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:27:58,828 INFO [finetune.py:976] (0/7) Epoch 19, batch 5550, loss[loss=0.2034, simple_loss=0.2698, pruned_loss=0.06855, over 4927.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2418, pruned_loss=0.05075, over 956050.01 frames. ], batch size: 33, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:28:09,999 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.131e+01 1.592e+02 1.937e+02 2.305e+02 3.019e+02, threshold=3.873e+02, percent-clipped=0.0
+2023-04-27 13:28:21,868 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5597, 1.4762, 1.9027, 1.9906, 1.4187, 1.2538, 1.5669, 1.0381],
+ device='cuda:0'), covar=tensor([0.0587, 0.0661, 0.0375, 0.0565, 0.0874, 0.1216, 0.0656, 0.0684],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0069, 0.0067, 0.0067, 0.0075, 0.0096, 0.0073, 0.0066],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 13:28:32,531 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4741, 1.9446, 2.3085, 2.8032, 2.2694, 1.8622, 1.7320, 2.1302],
+ device='cuda:0'), covar=tensor([0.3325, 0.3119, 0.1605, 0.2600, 0.2858, 0.2656, 0.3969, 0.2140],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0245, 0.0226, 0.0315, 0.0218, 0.0232, 0.0227, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 13:28:41,411 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108681.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:29:03,284 INFO [finetune.py:976] (0/7) Epoch 19, batch 5600, loss[loss=0.1753, simple_loss=0.2644, pruned_loss=0.04308, over 4810.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2458, pruned_loss=0.05121, over 955808.06 frames. ], batch size: 45, lr: 3.27e-03, grad_scale: 32.0
+2023-04-27 13:30:00,556 INFO [finetune.py:976] (0/7) Epoch 19, batch 5650, loss[loss=0.1815, simple_loss=0.2624, pruned_loss=0.05027, over 4865.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2484, pruned_loss=0.05149, over 956874.42 frames. ], batch size: 31, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:30:17,476 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.047e+02 1.527e+02 1.868e+02 2.111e+02 3.831e+02, threshold=3.735e+02, percent-clipped=0.0
+2023-04-27 13:30:52,594 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108788.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 13:30:54,445 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=108791.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:31:04,844 INFO [finetune.py:976] (0/7) Epoch 19, batch 5700, loss[loss=0.1684, simple_loss=0.2252, pruned_loss=0.05575, over 4121.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.243, pruned_loss=0.05073, over 933670.48 frames. ], batch size: 18, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:31:27,328 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-19.pt
+2023-04-27 13:31:40,722 INFO [finetune.py:976] (0/7) Epoch 20, batch 0, loss[loss=0.1863, simple_loss=0.2582, pruned_loss=0.05724, over 4863.00 frames. ], tot_loss[loss=0.1863, simple_loss=0.2582, pruned_loss=0.05724, over 4863.00 frames. ], batch size: 31, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:31:40,724 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 13:31:47,959 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1998, 2.6172, 1.0644, 1.4728, 1.8093, 1.3964, 3.0744, 1.7839],
+ device='cuda:0'), covar=tensor([0.0668, 0.0628, 0.0684, 0.1109, 0.0463, 0.0868, 0.0269, 0.0532],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 13:31:49,325 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3047, 1.4845, 1.7974, 1.9579, 1.8669, 1.9991, 1.8287, 1.8799],
+ device='cuda:0'), covar=tensor([0.3933, 0.5527, 0.4486, 0.4760, 0.5602, 0.7014, 0.5498, 0.4753],
+ device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0373, 0.0321, 0.0334, 0.0345, 0.0394, 0.0357, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:31:57,318 INFO [finetune.py:1010] (0/7) Epoch 20, validation: loss=0.1536, simple_loss=0.2249, pruned_loss=0.04109, over 2265189.00 frames.
+2023-04-27 13:31:57,318 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 13:32:13,977 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=108852.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:32:17,458 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.429e+02 1.789e+02 2.182e+02 4.169e+02, threshold=3.578e+02, percent-clipped=1.0
+2023-04-27 13:32:18,283 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-27 13:32:30,439 INFO [finetune.py:976] (0/7) Epoch 20, batch 50, loss[loss=0.1687, simple_loss=0.2399, pruned_loss=0.04877, over 4847.00 frames. ], tot_loss[loss=0.179, simple_loss=0.2517, pruned_loss=0.0531, over 216386.08 frames. ], batch size: 49, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:32:45,183 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7901, 2.8254, 2.2126, 3.2700, 2.8621, 2.7928, 1.2403, 2.7131],
+ device='cuda:0'), covar=tensor([0.2427, 0.1719, 0.3408, 0.3193, 0.2765, 0.2332, 0.5669, 0.2999],
+ device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0213, 0.0247, 0.0303, 0.0293, 0.0246, 0.0269, 0.0269],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:33:03,608 INFO [finetune.py:976] (0/7) Epoch 20, batch 100, loss[loss=0.1848, simple_loss=0.2544, pruned_loss=0.05765, over 4870.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.245, pruned_loss=0.05219, over 380727.48 frames. ], batch size: 34, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:33:08,744 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=108933.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:33:22,262 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7500, 2.0962, 1.7900, 2.0813, 1.6204, 1.7941, 1.7243, 1.4103],
+ device='cuda:0'), covar=tensor([0.1626, 0.1125, 0.0801, 0.0995, 0.3303, 0.1000, 0.1651, 0.2282],
+ device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0303, 0.0218, 0.0279, 0.0314, 0.0260, 0.0251, 0.0265],
+ device='cuda:0'), out_proj_covar=tensor([1.1557e-04, 1.2006e-04, 8.6276e-05, 1.1079e-04, 1.2740e-04, 1.0276e-04,
+ 1.0141e-04, 1.0494e-04], device='cuda:0')
+2023-04-27 13:33:23,934 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.275e+01 1.453e+02 1.763e+02 2.128e+02 3.737e+02, threshold=3.527e+02, percent-clipped=2.0
+2023-04-27 13:33:36,948 INFO [finetune.py:976] (0/7) Epoch 20, batch 150, loss[loss=0.2284, simple_loss=0.2895, pruned_loss=0.08362, over 4830.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2404, pruned_loss=0.05099, over 505950.58 frames. ], batch size: 39, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:33:39,410 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=108981.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:33:41,000 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5561, 1.4735, 1.8144, 1.8912, 1.3757, 1.2781, 1.5079, 0.9448],
+ device='cuda:0'), covar=tensor([0.0624, 0.0700, 0.0403, 0.0591, 0.0861, 0.1183, 0.0761, 0.0695],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0067, 0.0067, 0.0074, 0.0096, 0.0073, 0.0065],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 13:34:01,189 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-27 13:34:07,536 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-27 13:34:09,751 INFO [finetune.py:976] (0/7) Epoch 20, batch 200, loss[loss=0.2099, simple_loss=0.2735, pruned_loss=0.07317, over 4829.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2391, pruned_loss=0.05053, over 606563.65 frames. ], batch size: 47, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:34:11,043 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=109029.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:34:16,942 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109037.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 13:34:24,864 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5965, 3.9851, 0.7314, 1.8040, 2.0582, 2.4638, 2.3033, 0.8692],
+ device='cuda:0'), covar=tensor([0.1545, 0.1155, 0.2293, 0.1438, 0.1199, 0.1256, 0.1527, 0.2457],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0243, 0.0139, 0.0120, 0.0132, 0.0153, 0.0118, 0.0121],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 13:34:29,140 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0157, 2.4514, 2.0323, 2.3834, 1.7423, 2.1327, 1.9798, 1.6777],
+ device='cuda:0'), covar=tensor([0.1798, 0.1122, 0.0783, 0.1034, 0.3336, 0.0959, 0.1752, 0.2418],
+ device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0301, 0.0216, 0.0278, 0.0311, 0.0257, 0.0249, 0.0263],
+ device='cuda:0'), out_proj_covar=tensor([1.1458e-04, 1.1925e-04, 8.5426e-05, 1.1006e-04, 1.2634e-04, 1.0171e-04,
+ 1.0065e-04, 1.0415e-04], device='cuda:0')
+2023-04-27 13:34:29,599 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.602e+02 1.908e+02 2.213e+02 3.365e+02, threshold=3.817e+02, percent-clipped=0.0
+2023-04-27 13:34:42,781 INFO [finetune.py:976] (0/7) Epoch 20, batch 250, loss[loss=0.1801, simple_loss=0.2544, pruned_loss=0.05291, over 4824.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2409, pruned_loss=0.05023, over 682954.85 frames. ], batch size: 33, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:35:02,385 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109088.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:35:13,177 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109098.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 13:35:25,448 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4238, 1.7157, 1.7933, 1.9142, 1.7329, 1.8142, 1.8263, 1.8513],
+ device='cuda:0'), covar=tensor([0.4080, 0.5867, 0.4822, 0.4482, 0.5895, 0.7485, 0.5874, 0.5559],
+ device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0374, 0.0323, 0.0336, 0.0347, 0.0395, 0.0358, 0.0330],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:35:31,288 INFO [finetune.py:976] (0/7) Epoch 20, batch 300, loss[loss=0.1439, simple_loss=0.2161, pruned_loss=0.03584, over 4767.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2426, pruned_loss=0.04998, over 743634.67 frames. ], batch size: 28, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:35:49,151 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=109136.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:35:56,828 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109147.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:36:01,824 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7657, 2.3401, 1.8079, 1.7324, 1.2863, 1.3781, 1.8828, 1.2488],
+ device='cuda:0'), covar=tensor([0.1854, 0.1592, 0.1493, 0.1905, 0.2468, 0.2031, 0.1029, 0.2131],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0212, 0.0169, 0.0203, 0.0200, 0.0185, 0.0156, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 13:36:03,479 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.124e+02 1.663e+02 2.046e+02 2.623e+02 4.738e+02, threshold=4.093e+02, percent-clipped=3.0
+2023-04-27 13:36:25,687 INFO [finetune.py:976] (0/7) Epoch 20, batch 350, loss[loss=0.1614, simple_loss=0.2418, pruned_loss=0.04053, over 4798.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2446, pruned_loss=0.0506, over 791145.51 frames. ], batch size: 45, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:36:47,062 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109194.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:36:47,680 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5180, 2.2356, 2.5651, 2.8731, 2.7565, 2.3783, 2.0755, 2.5980],
+ device='cuda:0'), covar=tensor([0.0794, 0.0988, 0.0530, 0.0511, 0.0587, 0.0853, 0.0669, 0.0541],
+ device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0199, 0.0180, 0.0170, 0.0176, 0.0180, 0.0150, 0.0178],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 13:37:01,823 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4574, 1.7723, 1.8298, 1.9547, 1.7475, 1.8418, 1.8931, 1.9022],
+ device='cuda:0'), covar=tensor([0.3850, 0.6110, 0.4691, 0.4614, 0.5886, 0.7754, 0.5646, 0.5308],
+ device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0373, 0.0323, 0.0336, 0.0346, 0.0396, 0.0358, 0.0329],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:37:21,407 INFO [finetune.py:976] (0/7) Epoch 20, batch 400, loss[loss=0.1636, simple_loss=0.2452, pruned_loss=0.04101, over 4911.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2454, pruned_loss=0.05047, over 828093.08 frames. ], batch size: 33, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:37:31,604 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109233.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:38:03,869 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109255.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:38:05,568 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.658e+02 1.948e+02 2.358e+02 4.073e+02, threshold=3.896e+02, percent-clipped=0.0
+2023-04-27 13:38:22,928 INFO [finetune.py:976] (0/7) Epoch 20, batch 450, loss[loss=0.167, simple_loss=0.2378, pruned_loss=0.04806, over 4877.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2448, pruned_loss=0.05001, over 854353.88 frames. ], batch size: 32, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:38:25,925 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=109281.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:38:27,191 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0967, 1.2525, 4.9439, 4.6677, 4.2977, 4.7156, 4.3500, 4.3472],
+ device='cuda:0'), covar=tensor([0.6446, 0.5959, 0.1014, 0.1777, 0.1107, 0.1281, 0.1436, 0.1575],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0301, 0.0404, 0.0404, 0.0348, 0.0406, 0.0311, 0.0365],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 13:38:34,958 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109294.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:38:50,549 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109318.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:38:55,898 INFO [finetune.py:976] (0/7) Epoch 20, batch 500, loss[loss=0.196, simple_loss=0.2617, pruned_loss=0.06512, over 4916.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2443, pruned_loss=0.05014, over 878318.45 frames. ], batch size: 36, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:39:16,055 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109355.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:39:17,760 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.900e+01 1.534e+02 1.939e+02 2.475e+02 3.939e+02, threshold=3.877e+02, percent-clipped=2.0
+2023-04-27 13:39:29,355 INFO [finetune.py:976] (0/7) Epoch 20, batch 550, loss[loss=0.1969, simple_loss=0.2583, pruned_loss=0.06774, over 4823.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2422, pruned_loss=0.04998, over 894620.71 frames. ], batch size: 38, lr: 3.26e-03, grad_scale: 32.0
+2023-04-27 13:39:30,733 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109379.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 13:39:40,651 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109393.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 13:40:01,194 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3777, 1.6525, 1.7640, 1.8787, 1.6854, 1.7346, 1.8393, 1.8402],
+ device='cuda:0'), covar=tensor([0.4082, 0.5572, 0.4344, 0.4236, 0.5649, 0.7392, 0.5487, 0.4728],
+ device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0373, 0.0322, 0.0335, 0.0345, 0.0394, 0.0358, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:40:01,841 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6516, 0.6750, 1.4922, 2.0500, 1.7645, 1.5344, 1.5424, 1.5708],
+ device='cuda:0'), covar=tensor([0.4322, 0.6662, 0.6000, 0.5706, 0.5703, 0.7344, 0.7432, 0.8043],
+ device='cuda:0'), in_proj_covar=tensor([0.0430, 0.0411, 0.0506, 0.0506, 0.0457, 0.0485, 0.0492, 0.0499],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 13:40:02,926 INFO [finetune.py:976] (0/7) Epoch 20, batch 600, loss[loss=0.2489, simple_loss=0.3153, pruned_loss=0.09127, over 4730.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2432, pruned_loss=0.05104, over 908389.72 frames. ], batch size: 59, lr: 3.26e-03, grad_scale: 64.0
+2023-04-27 13:40:07,258 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4700, 1.5179, 1.8511, 1.8887, 1.4083, 1.2844, 1.5195, 1.0306],
+ device='cuda:0'), covar=tensor([0.0577, 0.0597, 0.0388, 0.0707, 0.0827, 0.1245, 0.0716, 0.0643],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0066, 0.0067, 0.0074, 0.0096, 0.0073, 0.0065],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 13:40:16,605 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109447.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:40:22,090 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-27 13:40:24,266 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.599e+02 2.024e+02 2.481e+02 5.741e+02, threshold=4.048e+02, percent-clipped=1.0
+2023-04-27 13:40:36,277 INFO [finetune.py:976] (0/7) Epoch 20, batch 650, loss[loss=0.1524, simple_loss=0.2296, pruned_loss=0.03756, over 4855.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2471, pruned_loss=0.05196, over 919532.97 frames. ], batch size: 25, lr: 3.26e-03, grad_scale: 64.0
+2023-04-27 13:40:48,770 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=109495.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:40:48,914 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.21 vs. limit=5.0
+2023-04-27 13:41:10,002 INFO [finetune.py:976] (0/7) Epoch 20, batch 700, loss[loss=0.1813, simple_loss=0.2549, pruned_loss=0.05381, over 4911.00 frames. ], tot_loss[loss=0.1766, simple_loss=0.2484, pruned_loss=0.05238, over 926882.53 frames. ], batch size: 33, lr: 3.26e-03, grad_scale: 64.0
+2023-04-27 13:41:14,982 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6190, 1.1959, 4.3078, 4.0755, 3.7313, 4.0703, 4.0069, 3.7940],
+ device='cuda:0'), covar=tensor([0.6599, 0.6058, 0.1090, 0.1594, 0.1144, 0.1720, 0.1337, 0.1475],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0304, 0.0405, 0.0407, 0.0351, 0.0409, 0.0313, 0.0367],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 13:41:25,572 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109550.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:41:30,317 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.590e+02 1.898e+02 2.337e+02 3.520e+02, threshold=3.796e+02, percent-clipped=0.0
+2023-04-27 13:41:36,210 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7964, 2.1139, 2.0360, 2.1903, 1.9904, 2.1443, 2.1327, 2.0470],
+ device='cuda:0'), covar=tensor([0.4006, 0.6113, 0.5250, 0.4491, 0.5821, 0.7013, 0.6577, 0.5834],
+ device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0375, 0.0323, 0.0336, 0.0346, 0.0395, 0.0360, 0.0329],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:41:43,809 INFO [finetune.py:976] (0/7) Epoch 20, batch 750, loss[loss=0.1978, simple_loss=0.2622, pruned_loss=0.06668, over 4929.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2487, pruned_loss=0.05242, over 931881.55 frames. ], batch size: 42, lr: 3.26e-03, grad_scale: 64.0
+2023-04-27 13:41:45,096 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109579.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:42:49,901 INFO [finetune.py:976] (0/7) Epoch 20, batch 800, loss[loss=0.1607, simple_loss=0.2332, pruned_loss=0.04411, over 4185.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2485, pruned_loss=0.05206, over 935603.45 frames. ], batch size: 66, lr: 3.25e-03, grad_scale: 64.0
+2023-04-27 13:43:07,614 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109640.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:43:19,824 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109650.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:43:27,909 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.469e+02 1.785e+02 2.276e+02 4.254e+02, threshold=3.571e+02, percent-clipped=2.0
+2023-04-27 13:43:28,673 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=109659.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:43:50,493 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109674.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 13:43:52,233 INFO [finetune.py:976] (0/7) Epoch 20, batch 850, loss[loss=0.1593, simple_loss=0.2232, pruned_loss=0.04776, over 4834.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2464, pruned_loss=0.05169, over 938502.87 frames. ], batch size: 47, lr: 3.25e-03, grad_scale: 64.0
+2023-04-27 13:44:03,204 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1807, 3.0517, 2.4463, 2.7036, 2.2354, 2.5344, 2.7177, 2.1029],
+ device='cuda:0'), covar=tensor([0.2115, 0.1125, 0.0747, 0.1378, 0.2923, 0.1172, 0.1638, 0.2418],
+ device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0307, 0.0221, 0.0284, 0.0318, 0.0264, 0.0255, 0.0269],
+ device='cuda:0'), out_proj_covar=tensor([1.1755e-04, 1.2200e-04, 8.7734e-05, 1.1252e-04, 1.2889e-04, 1.0449e-04,
+ 1.0294e-04, 1.0654e-04], device='cuda:0')
+2023-04-27 13:44:05,763 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0
+2023-04-27 13:44:08,055 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109693.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 13:44:26,918 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=109720.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:44:31,595 INFO [finetune.py:976] (0/7) Epoch 20, batch 900, loss[loss=0.1272, simple_loss=0.1985, pruned_loss=0.02794, over 4908.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2443, pruned_loss=0.05132, over 942962.84 frames. ], batch size: 46, lr: 3.25e-03, grad_scale: 64.0
+2023-04-27 13:44:34,782 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7869, 1.3600, 1.4432, 1.6061, 1.9844, 1.5768, 1.2810, 1.3505],
+ device='cuda:0'), covar=tensor([0.1600, 0.1625, 0.1919, 0.1326, 0.0742, 0.1632, 0.2063, 0.2320],
+ device='cuda:0'), in_proj_covar=tensor([0.0306, 0.0308, 0.0347, 0.0286, 0.0322, 0.0305, 0.0297, 0.0366],
+ device='cuda:0'), out_proj_covar=tensor([6.2918e-05, 6.3788e-05, 7.3371e-05, 5.7809e-05, 6.6732e-05, 6.3936e-05,
+ 6.2350e-05, 7.7956e-05], device='cuda:0')
+2023-04-27 13:44:40,141 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=109741.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 13:44:50,920 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.301e+02 1.595e+02 1.882e+02 2.269e+02 5.455e+02, threshold=3.764e+02, percent-clipped=0.0
+2023-04-27 13:45:03,938 INFO [finetune.py:976] (0/7) Epoch 20, batch 950, loss[loss=0.195, simple_loss=0.2503, pruned_loss=0.06983, over 4174.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2429, pruned_loss=0.05138, over 944661.82 frames. ], batch size: 65, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:45:08,917 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-27 13:45:38,011 INFO [finetune.py:976] (0/7) Epoch 20, batch 1000, loss[loss=0.2431, simple_loss=0.3177, pruned_loss=0.08428, over 4848.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2434, pruned_loss=0.05147, over 947338.27 frames. ], batch size: 44, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:45:40,004 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2606, 1.5373, 1.6970, 1.7860, 1.6339, 1.7559, 1.7296, 1.7560],
+ device='cuda:0'), covar=tensor([0.4389, 0.5826, 0.4651, 0.4600, 0.5704, 0.6749, 0.5479, 0.4959],
+ device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0373, 0.0321, 0.0335, 0.0345, 0.0394, 0.0358, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:45:44,778 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-04-27 13:45:52,567 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109850.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:45:55,077 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2912, 2.1053, 1.6988, 1.8408, 2.2625, 1.7804, 2.5222, 1.5130],
+ device='cuda:0'), covar=tensor([0.3317, 0.1910, 0.4990, 0.3221, 0.1438, 0.2435, 0.1689, 0.4264],
+ device='cuda:0'), in_proj_covar=tensor([0.0335, 0.0343, 0.0422, 0.0349, 0.0377, 0.0372, 0.0369, 0.0414],
+ device='cuda:0'), out_proj_covar=tensor([9.9675e-05, 1.0282e-04, 1.2819e-04, 1.0544e-04, 1.1237e-04, 1.1102e-04,
+ 1.0883e-04, 1.2509e-04], device='cuda:0')
+2023-04-27 13:45:55,735 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.37 vs. limit=5.0
+2023-04-27 13:45:58,961 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.126e+02 1.623e+02 1.911e+02 2.262e+02 3.883e+02, threshold=3.822e+02, percent-clipped=2.0
+2023-04-27 13:46:08,145 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3022, 1.2666, 1.5888, 1.6122, 1.2344, 1.2059, 1.3033, 0.9510],
+ device='cuda:0'), covar=tensor([0.0561, 0.0607, 0.0408, 0.0515, 0.0708, 0.1022, 0.0536, 0.0572],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0067, 0.0067, 0.0074, 0.0096, 0.0073, 0.0065],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 13:46:11,418 INFO [finetune.py:976] (0/7) Epoch 20, batch 1050, loss[loss=0.1461, simple_loss=0.2285, pruned_loss=0.03187, over 4864.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2462, pruned_loss=0.05181, over 948189.52 frames. ], batch size: 34, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:46:16,804 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6165, 3.6377, 2.6332, 4.2031, 3.6374, 3.6542, 1.4296, 3.5518],
+ device='cuda:0'), covar=tensor([0.1609, 0.1173, 0.3077, 0.1713, 0.3624, 0.1625, 0.5926, 0.2258],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0215, 0.0249, 0.0304, 0.0295, 0.0246, 0.0272, 0.0271],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:46:21,126 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.40 vs. limit=5.0
+2023-04-27 13:46:25,235 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=109898.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:46:37,214 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0336, 2.6853, 2.2327, 2.5724, 1.9562, 2.2784, 2.2529, 1.8474],
+ device='cuda:0'), covar=tensor([0.1971, 0.1078, 0.0781, 0.1066, 0.3009, 0.1112, 0.1762, 0.2330],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0304, 0.0218, 0.0280, 0.0314, 0.0260, 0.0252, 0.0265],
+ device='cuda:0'), out_proj_covar=tensor([1.1619e-04, 1.2078e-04, 8.6318e-05, 1.1092e-04, 1.2746e-04, 1.0315e-04,
+ 1.0188e-04, 1.0498e-04], device='cuda:0')
+2023-04-27 13:46:43,732 INFO [finetune.py:976] (0/7) Epoch 20, batch 1100, loss[loss=0.1835, simple_loss=0.2566, pruned_loss=0.05522, over 4821.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2469, pruned_loss=0.05127, over 950101.72 frames. ], batch size: 30, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:46:50,144 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=109935.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:46:59,845 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109950.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:47:05,248 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.994e+01 1.686e+02 1.942e+02 2.391e+02 4.510e+02, threshold=3.883e+02, percent-clipped=3.0
+2023-04-27 13:47:07,420 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-27 13:47:16,118 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=109974.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:47:17,880 INFO [finetune.py:976] (0/7) Epoch 20, batch 1150, loss[loss=0.1655, simple_loss=0.2337, pruned_loss=0.04869, over 4859.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2469, pruned_loss=0.05093, over 951289.38 frames. ], batch size: 31, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:47:40,679 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=109998.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:47:41,991 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-110000.pt
+2023-04-27 13:47:46,320 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9633, 1.4845, 1.7771, 1.7738, 1.7218, 1.4607, 0.7565, 1.4341],
+ device='cuda:0'), covar=tensor([0.3366, 0.3129, 0.1816, 0.2189, 0.2617, 0.2728, 0.4385, 0.2102],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0245, 0.0225, 0.0314, 0.0217, 0.0231, 0.0227, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 13:47:52,391 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110015.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:47:54,547 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110017.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:47:57,536 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=110022.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:48:00,998 INFO [finetune.py:976] (0/7) Epoch 20, batch 1200, loss[loss=0.1699, simple_loss=0.2483, pruned_loss=0.04574, over 4886.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.247, pruned_loss=0.05154, over 951983.46 frames. ], batch size: 43, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:48:15,940 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0882, 2.4895, 0.9716, 1.3184, 1.8941, 1.2075, 2.9838, 1.5687],
+ device='cuda:0'), covar=tensor([0.0649, 0.0538, 0.0743, 0.1183, 0.0454, 0.1001, 0.0255, 0.0656],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0049, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 13:48:17,796 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110044.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:48:37,468 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.027e+02 1.685e+02 1.958e+02 2.263e+02 7.161e+02, threshold=3.916e+02, percent-clipped=1.0
+2023-04-27 13:48:46,804 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1080, 2.6527, 2.1981, 2.1860, 1.5238, 1.5165, 2.3336, 1.5206],
+ device='cuda:0'), covar=tensor([0.1473, 0.1331, 0.1241, 0.1515, 0.2083, 0.1701, 0.0787, 0.1823],
+ device='cuda:0'), in_proj_covar=tensor([0.0194, 0.0209, 0.0167, 0.0202, 0.0199, 0.0183, 0.0154, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 13:49:01,342 INFO [finetune.py:976] (0/7) Epoch 20, batch 1250, loss[loss=0.1682, simple_loss=0.2365, pruned_loss=0.04996, over 4802.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2449, pruned_loss=0.05097, over 951774.09 frames. ], batch size: 29, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:49:02,089 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110078.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:49:02,764 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.27 vs. limit=5.0
+2023-04-27 13:49:25,652 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110105.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:49:28,699 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110110.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:49:31,768 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110115.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:49:40,411 INFO [finetune.py:976] (0/7) Epoch 20, batch 1300, loss[loss=0.1569, simple_loss=0.229, pruned_loss=0.04238, over 4825.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2423, pruned_loss=0.05011, over 952081.26 frames. ], batch size: 40, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:49:45,458 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9589, 1.8516, 2.2588, 2.5169, 1.8806, 1.6359, 1.9953, 1.1426],
+ device='cuda:0'), covar=tensor([0.0620, 0.0817, 0.0495, 0.0835, 0.0733, 0.1079, 0.0670, 0.0706],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0067, 0.0067, 0.0074, 0.0096, 0.0073, 0.0065],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 13:50:01,795 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.513e+02 1.801e+02 2.249e+02 8.027e+02, threshold=3.601e+02, percent-clipped=2.0
+2023-04-27 13:50:09,719 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110171.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:50:13,278 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110176.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:50:13,763 INFO [finetune.py:976] (0/7) Epoch 20, batch 1350, loss[loss=0.1551, simple_loss=0.2177, pruned_loss=0.04629, over 4211.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2424, pruned_loss=0.05069, over 950801.83 frames. ], batch size: 18, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:50:34,765 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8419, 2.2774, 1.7869, 1.5686, 1.3534, 1.3646, 1.7512, 1.2891],
+ device='cuda:0'), covar=tensor([0.1701, 0.1311, 0.1452, 0.1689, 0.2366, 0.1989, 0.1037, 0.2076],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0211, 0.0168, 0.0203, 0.0200, 0.0185, 0.0155, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 13:50:41,461 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4752, 1.7661, 1.8623, 1.9562, 1.8572, 1.9355, 1.9823, 1.9415],
+ device='cuda:0'), covar=tensor([0.4284, 0.5708, 0.4815, 0.4852, 0.5348, 0.7066, 0.5373, 0.4936],
+ device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0372, 0.0322, 0.0335, 0.0343, 0.0393, 0.0356, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 13:50:42,720 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-27 13:50:47,126 INFO [finetune.py:976] (0/7) Epoch 20, batch 1400, loss[loss=0.1369, simple_loss=0.2249, pruned_loss=0.02444, over 4813.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2451, pruned_loss=0.05114, over 951386.41 frames. ], batch size: 38, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:50:52,587 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110235.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:51:09,025 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.619e+02 1.811e+02 2.225e+02 6.587e+02, threshold=3.621e+02, percent-clipped=5.0
+2023-04-27 13:51:13,015 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-04-27 13:51:15,270 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4711, 2.1172, 2.3999, 2.9470, 2.3949, 1.9877, 1.7646, 2.2811],
+ device='cuda:0'), covar=tensor([0.3454, 0.3072, 0.1736, 0.2670, 0.2734, 0.2619, 0.3985, 0.2133],
+ device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0245, 0.0227, 0.0316, 0.0218, 0.0232, 0.0228, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 13:51:19,999 INFO [finetune.py:976] (0/7) Epoch 20, batch 1450, loss[loss=0.1523, simple_loss=0.232, pruned_loss=0.03624, over 4852.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2466, pruned_loss=0.05159, over 952147.49 frames. ], batch size: 49, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:51:20,178 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-04-27 13:51:25,116 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=110283.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:51:25,178 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110283.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:51:38,769 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110303.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:51:46,538 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110315.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:51:48,410 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110318.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:51:53,750 INFO [finetune.py:976] (0/7) Epoch 20, batch 1500, loss[loss=0.1855, simple_loss=0.2618, pruned_loss=0.05465, over 4865.00 frames. ], tot_loss[loss=0.1762, simple_loss=0.2483, pruned_loss=0.05206, over 953208.42 frames. ], batch size: 34, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:52:03,862 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0
+2023-04-27 13:52:05,650 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110344.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:52:16,150 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.560e+02 1.895e+02 2.185e+02 4.822e+02, threshold=3.791e+02, percent-clipped=2.0
+2023-04-27 13:52:19,127 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=110363.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:52:19,789 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110364.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:52:22,461 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.48 vs. limit=5.0
+2023-04-27 13:52:25,269 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110373.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:52:27,673 INFO [finetune.py:976] (0/7) Epoch 20, batch 1550, loss[loss=0.1397, simple_loss=0.2291, pruned_loss=0.02513, over 4792.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2485, pruned_loss=0.05159, over 955457.58 frames. ], batch size: 26, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:52:29,083 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110379.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:52:51,528 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0
+2023-04-27 13:53:00,007 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110400.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:53:22,804 INFO [finetune.py:976] (0/7) Epoch 20, batch 1600, loss[loss=0.1631, simple_loss=0.2351, pruned_loss=0.04559, over 4893.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2451, pruned_loss=0.05071, over 954738.86 frames. ], batch size: 37, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:53:33,896 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7746, 1.7802, 2.1427, 2.3311, 1.7647, 1.5213, 1.8061, 1.0876],
+ device='cuda:0'), covar=tensor([0.0673, 0.0635, 0.0545, 0.0727, 0.0747, 0.1200, 0.0871, 0.0725],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0067, 0.0067, 0.0074, 0.0096, 0.0073, 0.0065],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 13:54:08,138 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.546e+02 1.968e+02 2.341e+02 5.862e+02, threshold=3.936e+02, percent-clipped=2.0
+2023-04-27 13:54:10,750 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6595, 1.6613, 0.7165, 1.3346, 1.7816, 1.5276, 1.4007, 1.4918],
+ device='cuda:0'), covar=tensor([0.0456, 0.0343, 0.0364, 0.0538, 0.0281, 0.0470, 0.0465, 0.0508],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0020, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051],
+ device='cuda:0')
+2023-04-27 13:54:12,538 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110466.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:54:21,390 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110471.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:54:30,173 INFO [finetune.py:976] (0/7) Epoch 20, batch 1650, loss[loss=0.1369, simple_loss=0.2179, pruned_loss=0.02802, over 4782.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2432, pruned_loss=0.05013, over 953260.76 frames. ], batch size: 29, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:55:01,200 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9431, 2.3292, 0.8215, 1.2699, 1.7509, 1.1999, 2.5178, 1.3551],
+ device='cuda:0'), covar=tensor([0.0710, 0.0532, 0.0634, 0.1309, 0.0418, 0.1036, 0.0372, 0.0730],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 13:55:09,363 INFO [finetune.py:976] (0/7) Epoch 20, batch 1700, loss[loss=0.2394, simple_loss=0.3012, pruned_loss=0.08881, over 4742.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2422, pruned_loss=0.05076, over 954494.93 frames. ], batch size: 54, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:55:27,736 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0108, 2.5480, 1.0229, 1.3784, 2.0117, 1.3017, 3.4557, 1.7140],
+ device='cuda:0'), covar=tensor([0.0736, 0.0668, 0.0817, 0.1286, 0.0530, 0.1007, 0.0203, 0.0689],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 13:55:31,158 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.338e+01 1.550e+02 1.876e+02 2.294e+02 4.137e+02, threshold=3.752e+02, percent-clipped=2.0
+2023-04-27 13:55:41,518 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-04-27 13:55:43,125 INFO [finetune.py:976] (0/7) Epoch 20, batch 1750, loss[loss=0.199, simple_loss=0.2721, pruned_loss=0.06298, over 4906.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.245, pruned_loss=0.05179, over 954531.09 frames. ], batch size: 36, lr: 3.25e-03, grad_scale: 32.0
+2023-04-27 13:56:06,137 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110611.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 13:56:17,235 INFO [finetune.py:976] (0/7) Epoch 20, batch 1800, loss[loss=0.1555, simple_loss=0.2088, pruned_loss=0.05113, over 4389.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2462, pruned_loss=0.05124, over 955717.50 frames. 
], batch size: 19, lr: 3.25e-03, grad_scale: 32.0 +2023-04-27 13:56:24,702 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110639.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:56:31,155 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4647, 2.3464, 2.4942, 2.8443, 3.0259, 2.3697, 1.9672, 2.7029], + device='cuda:0'), covar=tensor([0.0829, 0.0852, 0.0620, 0.0559, 0.0522, 0.0859, 0.0729, 0.0483], + device='cuda:0'), in_proj_covar=tensor([0.0188, 0.0201, 0.0184, 0.0173, 0.0179, 0.0182, 0.0154, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 13:56:38,212 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.091e+02 1.583e+02 1.893e+02 2.198e+02 3.811e+02, threshold=3.786e+02, percent-clipped=1.0 +2023-04-27 13:56:38,298 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110659.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:56:47,721 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110672.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:56:48,304 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110673.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:56:48,923 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110674.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:56:50,675 INFO [finetune.py:976] (0/7) Epoch 20, batch 1850, loss[loss=0.1464, simple_loss=0.2275, pruned_loss=0.03268, over 4743.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2461, pruned_loss=0.05134, over 953972.36 frames. ], batch size: 54, lr: 3.25e-03, grad_scale: 32.0 +2023-04-27 13:57:06,434 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110700.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:57:21,093 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=110721.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:57:24,742 INFO [finetune.py:976] (0/7) Epoch 20, batch 1900, loss[loss=0.2021, simple_loss=0.2784, pruned_loss=0.06287, over 4813.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2478, pruned_loss=0.05202, over 952503.43 frames. 
], batch size: 33, lr: 3.25e-03, grad_scale: 32.0 +2023-04-27 13:57:29,653 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2677, 1.2043, 1.5286, 1.5176, 1.1938, 1.1508, 1.2015, 0.8857], + device='cuda:0'), covar=tensor([0.0513, 0.0558, 0.0337, 0.0523, 0.0703, 0.0981, 0.0481, 0.0525], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0069, 0.0067, 0.0068, 0.0075, 0.0096, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 13:57:30,859 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110736.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:57:38,119 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=110748.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:57:46,352 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.050e+01 1.575e+02 1.913e+02 2.383e+02 8.631e+02, threshold=3.825e+02, percent-clipped=5.0 +2023-04-27 13:57:51,430 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110766.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:57:53,317 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8843, 1.9323, 1.7597, 1.5172, 2.0676, 1.6478, 2.6412, 1.5379], + device='cuda:0'), covar=tensor([0.3604, 0.1936, 0.4738, 0.2806, 0.1535, 0.2365, 0.1145, 0.4318], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0347, 0.0427, 0.0353, 0.0383, 0.0375, 0.0373, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 13:57:55,025 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110771.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:57:59,103 INFO [finetune.py:976] (0/7) Epoch 20, batch 1950, loss[loss=0.218, simple_loss=0.2723, pruned_loss=0.08188, over 4809.00 frames. ], tot_loss[loss=0.1744, simple_loss=0.2462, pruned_loss=0.05125, over 953947.86 frames. 
], batch size: 41, lr: 3.25e-03, grad_scale: 32.0 +2023-04-27 13:57:59,829 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2071, 2.7541, 2.2475, 2.2214, 1.5647, 1.6307, 2.3853, 1.5241], + device='cuda:0'), covar=tensor([0.1455, 0.1364, 0.1314, 0.1534, 0.2103, 0.1728, 0.0822, 0.1775], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0211, 0.0168, 0.0203, 0.0200, 0.0184, 0.0155, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 13:58:29,602 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110797.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 13:58:30,826 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4271, 2.9714, 0.9969, 1.5531, 2.1720, 1.2615, 3.8213, 1.8468], + device='cuda:0'), covar=tensor([0.0640, 0.0767, 0.0840, 0.1159, 0.0470, 0.0962, 0.0199, 0.0597], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0066, 0.0048, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 13:58:30,835 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3985, 1.7927, 1.6480, 1.9882, 1.9293, 2.0462, 1.5583, 4.0275], + device='cuda:0'), covar=tensor([0.0521, 0.0762, 0.0689, 0.1098, 0.0566, 0.0506, 0.0706, 0.0121], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 13:58:52,663 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=110814.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:59:01,935 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=110819.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:59:11,701 INFO [finetune.py:976] (0/7) Epoch 20, batch 2000, loss[loss=0.1903, simple_loss=0.2525, pruned_loss=0.06404, over 4814.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2433, pruned_loss=0.05032, over 952707.85 frames. ], batch size: 38, lr: 3.25e-03, grad_scale: 32.0 +2023-04-27 13:59:16,761 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8501, 2.3597, 2.0537, 1.7337, 1.3630, 1.4751, 2.1851, 1.3560], + device='cuda:0'), covar=tensor([0.1756, 0.1586, 0.1471, 0.1865, 0.2372, 0.1958, 0.0894, 0.2011], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0211, 0.0168, 0.0203, 0.0200, 0.0184, 0.0155, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 13:59:33,914 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.034e+02 1.549e+02 1.813e+02 2.155e+02 3.715e+02, threshold=3.626e+02, percent-clipped=0.0 +2023-04-27 13:59:34,159 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-27 13:59:45,771 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110868.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 13:59:56,923 INFO [finetune.py:976] (0/7) Epoch 20, batch 2050, loss[loss=0.1912, simple_loss=0.2654, pruned_loss=0.05849, over 4848.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2416, pruned_loss=0.05056, over 950643.28 frames. 
], batch size: 49, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:00:46,825 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8506, 1.4230, 1.4886, 1.6859, 2.0923, 1.6390, 1.4209, 1.4459], + device='cuda:0'), covar=tensor([0.1629, 0.1602, 0.1958, 0.1343, 0.0892, 0.1829, 0.1930, 0.2227], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0310, 0.0349, 0.0288, 0.0324, 0.0307, 0.0299, 0.0370], + device='cuda:0'), out_proj_covar=tensor([6.3487e-05, 6.4181e-05, 7.3894e-05, 5.8241e-05, 6.7137e-05, 6.4303e-05, + 6.2664e-05, 7.8607e-05], device='cuda:0') +2023-04-27 14:00:53,298 INFO [finetune.py:976] (0/7) Epoch 20, batch 2100, loss[loss=0.2018, simple_loss=0.2745, pruned_loss=0.06453, over 4863.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2413, pruned_loss=0.05098, over 950252.18 frames. ], batch size: 44, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:00:54,646 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=110929.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:01:01,695 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110939.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:01:13,870 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=110958.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:01:14,355 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.601e+02 1.866e+02 2.314e+02 3.983e+02, threshold=3.732e+02, percent-clipped=4.0 +2023-04-27 14:01:14,453 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110959.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:01:20,708 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=110967.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:01:24,417 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3884, 3.4942, 0.8264, 1.7071, 1.7899, 2.5048, 1.9466, 0.9748], + device='cuda:0'), covar=tensor([0.1472, 0.0957, 0.2043, 0.1354, 0.1149, 0.0936, 0.1499, 0.2054], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0242, 0.0138, 0.0120, 0.0134, 0.0152, 0.0118, 0.0120], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 14:01:25,020 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=110974.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:01:27,216 INFO [finetune.py:976] (0/7) Epoch 20, batch 2150, loss[loss=0.1711, simple_loss=0.2415, pruned_loss=0.05036, over 4691.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2452, pruned_loss=0.0519, over 950240.85 frames. ], batch size: 23, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:01:34,418 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=110987.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:01:47,100 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=111007.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:01:55,841 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111019.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:01:57,631 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=111022.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:02:00,613 INFO [finetune.py:976] (0/7) Epoch 20, batch 2200, loss[loss=0.1746, simple_loss=0.2458, pruned_loss=0.05167, over 4819.00 frames. 
], tot_loss[loss=0.1768, simple_loss=0.2482, pruned_loss=0.05274, over 949566.91 frames. ], batch size: 33, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:02:20,443 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8152, 2.4219, 1.9452, 1.8466, 1.3788, 1.3731, 1.9933, 1.3390], + device='cuda:0'), covar=tensor([0.1569, 0.1326, 0.1319, 0.1624, 0.2234, 0.1840, 0.0929, 0.1955], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0210, 0.0168, 0.0202, 0.0199, 0.0184, 0.0154, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 14:02:22,144 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.590e+02 1.963e+02 2.328e+02 5.181e+02, threshold=3.927e+02, percent-clipped=3.0 +2023-04-27 14:02:34,619 INFO [finetune.py:976] (0/7) Epoch 20, batch 2250, loss[loss=0.1889, simple_loss=0.2751, pruned_loss=0.05135, over 4820.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2486, pruned_loss=0.05242, over 951034.31 frames. ], batch size: 38, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:02:44,859 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111092.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 14:02:44,920 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6716, 2.8197, 2.0237, 2.4450, 2.7193, 2.2282, 3.5901, 1.9868], + device='cuda:0'), covar=tensor([0.3766, 0.1918, 0.4924, 0.3144, 0.1807, 0.2759, 0.1278, 0.4543], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0348, 0.0429, 0.0354, 0.0384, 0.0376, 0.0374, 0.0419], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:03:08,137 INFO [finetune.py:976] (0/7) Epoch 20, batch 2300, loss[loss=0.1288, simple_loss=0.1998, pruned_loss=0.02891, over 4002.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2472, pruned_loss=0.05125, over 952083.61 frames. 
], batch size: 17, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:03:22,722 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1604, 2.2611, 1.9486, 1.8819, 2.3961, 1.8808, 2.9309, 1.7709], + device='cuda:0'), covar=tensor([0.4046, 0.1993, 0.4719, 0.3157, 0.1793, 0.2605, 0.1416, 0.4423], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0351, 0.0431, 0.0356, 0.0385, 0.0378, 0.0376, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:03:28,739 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6501, 0.7741, 1.5671, 1.9794, 1.7022, 1.5664, 1.5921, 1.5716], + device='cuda:0'), covar=tensor([0.3924, 0.5724, 0.5412, 0.5347, 0.4940, 0.6547, 0.6139, 0.7495], + device='cuda:0'), in_proj_covar=tensor([0.0428, 0.0410, 0.0504, 0.0508, 0.0455, 0.0484, 0.0490, 0.0496], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:03:29,173 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.334e+01 1.540e+02 1.823e+02 2.154e+02 3.608e+02, threshold=3.645e+02, percent-clipped=0.0 +2023-04-27 14:03:36,390 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1981, 1.5965, 1.4919, 1.7789, 1.6843, 1.9267, 1.4225, 3.4794], + device='cuda:0'), covar=tensor([0.0632, 0.0737, 0.0746, 0.1172, 0.0586, 0.0543, 0.0734, 0.0168], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 14:03:41,038 INFO [finetune.py:976] (0/7) Epoch 20, batch 2350, loss[loss=0.14, simple_loss=0.2089, pruned_loss=0.03556, over 4713.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.245, pruned_loss=0.05031, over 952093.91 frames. ], batch size: 23, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:04:27,659 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111224.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:04:28,311 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111225.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:04:29,454 INFO [finetune.py:976] (0/7) Epoch 20, batch 2400, loss[loss=0.1409, simple_loss=0.204, pruned_loss=0.03892, over 4723.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2428, pruned_loss=0.04998, over 949022.99 frames. 
], batch size: 23, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:04:37,941 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9951, 2.6358, 1.0695, 1.2821, 1.6983, 1.1519, 3.3273, 1.6115], + device='cuda:0'), covar=tensor([0.0916, 0.0955, 0.1025, 0.1722, 0.0735, 0.1410, 0.0329, 0.0896], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0047, 0.0050, 0.0052, 0.0074, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 14:05:01,009 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1335, 1.6356, 5.2578, 4.9044, 4.5837, 5.1251, 4.6855, 4.5955], + device='cuda:0'), covar=tensor([0.6610, 0.5825, 0.0970, 0.1687, 0.1064, 0.1510, 0.1083, 0.1568], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0306, 0.0408, 0.0408, 0.0351, 0.0409, 0.0315, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:05:02,142 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.830e+01 1.579e+02 1.923e+02 2.296e+02 7.513e+02, threshold=3.847e+02, percent-clipped=3.0 +2023-04-27 14:05:07,617 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111267.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:05:19,550 INFO [finetune.py:976] (0/7) Epoch 20, batch 2450, loss[loss=0.2111, simple_loss=0.263, pruned_loss=0.07958, over 4759.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2391, pruned_loss=0.04893, over 950927.34 frames. ], batch size: 54, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:05:24,912 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0978, 2.4007, 1.0247, 1.3646, 1.7824, 1.2507, 3.0414, 1.6458], + device='cuda:0'), covar=tensor([0.0673, 0.0606, 0.0670, 0.1191, 0.0471, 0.0996, 0.0239, 0.0629], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0049, 0.0052, 0.0074, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 14:05:27,983 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3079, 1.3437, 1.3811, 1.6168, 1.6923, 1.2319, 0.8977, 1.4300], + device='cuda:0'), covar=tensor([0.0878, 0.1274, 0.0899, 0.0660, 0.0664, 0.0914, 0.0883, 0.0662], + device='cuda:0'), in_proj_covar=tensor([0.0189, 0.0203, 0.0185, 0.0174, 0.0180, 0.0182, 0.0155, 0.0181], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:05:36,567 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111286.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:06:05,507 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111314.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:06:06,087 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=111315.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:06:06,280 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-27 14:06:13,839 INFO [finetune.py:976] (0/7) Epoch 20, batch 2500, loss[loss=0.2088, simple_loss=0.284, pruned_loss=0.06683, over 4854.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2425, pruned_loss=0.05062, over 951381.61 frames. 
], batch size: 44, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:06:35,391 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.107e+02 1.671e+02 1.878e+02 2.269e+02 4.413e+02, threshold=3.755e+02, percent-clipped=2.0 +2023-04-27 14:06:46,741 INFO [finetune.py:976] (0/7) Epoch 20, batch 2550, loss[loss=0.1785, simple_loss=0.2603, pruned_loss=0.04839, over 4855.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2452, pruned_loss=0.05135, over 951264.18 frames. ], batch size: 31, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:06:57,265 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111392.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 14:07:20,615 INFO [finetune.py:976] (0/7) Epoch 20, batch 2600, loss[loss=0.1505, simple_loss=0.2228, pruned_loss=0.0391, over 4799.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2465, pruned_loss=0.05154, over 953484.49 frames. ], batch size: 40, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:07:29,126 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=111440.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:07:30,865 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0081, 2.3620, 1.0653, 1.3459, 1.7556, 1.1983, 2.8999, 1.5858], + device='cuda:0'), covar=tensor([0.0603, 0.0598, 0.0721, 0.1087, 0.0424, 0.0939, 0.0253, 0.0561], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 14:07:42,989 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.116e+02 1.630e+02 1.917e+02 2.393e+02 4.047e+02, threshold=3.834e+02, percent-clipped=2.0 +2023-04-27 14:07:48,660 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111468.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:07:54,482 INFO [finetune.py:976] (0/7) Epoch 20, batch 2650, loss[loss=0.1943, simple_loss=0.2724, pruned_loss=0.05809, over 4804.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2483, pruned_loss=0.05189, over 953133.33 frames. ], batch size: 40, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:08:26,347 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111524.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:08:28,085 INFO [finetune.py:976] (0/7) Epoch 20, batch 2700, loss[loss=0.1627, simple_loss=0.2423, pruned_loss=0.0415, over 4924.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2479, pruned_loss=0.05149, over 954637.13 frames. ], batch size: 33, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:08:29,411 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111529.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:08:49,485 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.017e+02 1.491e+02 1.860e+02 2.271e+02 3.945e+02, threshold=3.719e+02, percent-clipped=1.0 +2023-04-27 14:08:58,398 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=111572.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:09:01,907 INFO [finetune.py:976] (0/7) Epoch 20, batch 2750, loss[loss=0.1565, simple_loss=0.2252, pruned_loss=0.04389, over 4805.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2449, pruned_loss=0.05095, over 954120.74 frames. 
], batch size: 25, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:09:04,322 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111581.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:09:07,371 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6793, 1.3424, 1.3812, 1.5182, 1.9018, 1.5233, 1.2832, 1.2889], + device='cuda:0'), covar=tensor([0.1514, 0.1063, 0.1563, 0.1092, 0.0653, 0.1525, 0.1718, 0.1794], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0309, 0.0349, 0.0287, 0.0325, 0.0305, 0.0299, 0.0369], + device='cuda:0'), out_proj_covar=tensor([6.3620e-05, 6.4035e-05, 7.3947e-05, 5.7896e-05, 6.7423e-05, 6.3934e-05, + 6.2745e-05, 7.8404e-05], device='cuda:0') +2023-04-27 14:09:08,610 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2872, 2.9338, 2.1876, 2.3007, 1.5081, 1.5760, 2.4029, 1.5332], + device='cuda:0'), covar=tensor([0.1739, 0.1455, 0.1405, 0.1697, 0.2484, 0.2038, 0.1013, 0.2060], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0211, 0.0169, 0.0204, 0.0200, 0.0186, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 14:09:26,666 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-27 14:09:37,578 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111614.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:09:37,727 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-27 14:09:57,463 INFO [finetune.py:976] (0/7) Epoch 20, batch 2800, loss[loss=0.1601, simple_loss=0.238, pruned_loss=0.04112, over 4760.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2425, pruned_loss=0.05041, over 956519.45 frames. ], batch size: 27, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:10:24,459 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.707e+01 1.587e+02 1.829e+02 2.177e+02 6.182e+02, threshold=3.659e+02, percent-clipped=2.0 +2023-04-27 14:10:26,835 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=111662.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:10:36,931 INFO [finetune.py:976] (0/7) Epoch 20, batch 2850, loss[loss=0.1862, simple_loss=0.2545, pruned_loss=0.05893, over 4911.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2409, pruned_loss=0.04995, over 956679.20 frames. ], batch size: 37, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:11:27,904 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9234, 1.1916, 3.2508, 3.0077, 2.9078, 3.1972, 3.1331, 2.8582], + device='cuda:0'), covar=tensor([0.7470, 0.5586, 0.1476, 0.2074, 0.1452, 0.2112, 0.2136, 0.1886], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0308, 0.0409, 0.0410, 0.0354, 0.0411, 0.0317, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:11:31,378 INFO [finetune.py:976] (0/7) Epoch 20, batch 2900, loss[loss=0.1637, simple_loss=0.2292, pruned_loss=0.04903, over 4162.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2438, pruned_loss=0.05082, over 956447.97 frames. 
], batch size: 18, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:12:12,497 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.619e+02 1.984e+02 2.443e+02 6.162e+02, threshold=3.968e+02, percent-clipped=7.0 +2023-04-27 14:12:13,416 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.69 vs. limit=5.0 +2023-04-27 14:12:36,595 INFO [finetune.py:976] (0/7) Epoch 20, batch 2950, loss[loss=0.168, simple_loss=0.2354, pruned_loss=0.05026, over 4901.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2471, pruned_loss=0.0519, over 956532.06 frames. ], batch size: 32, lr: 3.24e-03, grad_scale: 64.0 +2023-04-27 14:12:56,804 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=111791.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 14:13:17,095 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-27 14:13:29,944 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=111824.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:13:32,109 INFO [finetune.py:976] (0/7) Epoch 20, batch 3000, loss[loss=0.1692, simple_loss=0.2413, pruned_loss=0.04859, over 4925.00 frames. ], tot_loss[loss=0.1768, simple_loss=0.2485, pruned_loss=0.05254, over 955788.52 frames. ], batch size: 33, lr: 3.24e-03, grad_scale: 64.0 +2023-04-27 14:13:32,110 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 14:13:34,007 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3994, 1.3245, 1.6263, 1.6582, 1.2991, 1.2631, 1.3416, 0.9191], + device='cuda:0'), covar=tensor([0.0570, 0.0526, 0.0409, 0.0462, 0.0841, 0.1209, 0.0558, 0.0510], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0069, 0.0067, 0.0067, 0.0075, 0.0096, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 14:13:35,179 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3927, 1.3716, 3.8962, 3.5550, 3.5286, 3.7962, 3.8653, 3.4528], + device='cuda:0'), covar=tensor([0.7144, 0.5204, 0.1342, 0.2258, 0.1299, 0.1342, 0.0843, 0.1900], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0309, 0.0409, 0.0409, 0.0354, 0.0411, 0.0317, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:13:35,941 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3880, 1.2822, 1.6091, 1.6458, 1.2379, 1.2418, 1.2878, 0.9017], + device='cuda:0'), covar=tensor([0.0631, 0.0722, 0.0434, 0.0492, 0.0890, 0.1201, 0.0598, 0.0603], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0069, 0.0067, 0.0067, 0.0075, 0.0096, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 14:13:49,411 INFO [finetune.py:1010] (0/7) Epoch 20, validation: loss=0.1527, simple_loss=0.2229, pruned_loss=0.04123, over 2265189.00 frames. 
+2023-04-27 14:13:49,412 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 14:14:05,951 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2935, 1.4951, 1.4753, 1.7059, 1.5935, 1.8067, 1.4344, 3.3544], + device='cuda:0'), covar=tensor([0.0569, 0.0775, 0.0734, 0.1179, 0.0607, 0.0497, 0.0701, 0.0137], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 14:14:20,686 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=111852.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 14:14:26,909 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3224, 2.3256, 1.9519, 1.9246, 2.4535, 1.9572, 2.9016, 1.8157], + device='cuda:0'), covar=tensor([0.3252, 0.1907, 0.4335, 0.3227, 0.1501, 0.2328, 0.1236, 0.3667], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0348, 0.0423, 0.0353, 0.0380, 0.0373, 0.0372, 0.0415], + device='cuda:0'), out_proj_covar=tensor([9.9896e-05, 1.0422e-04, 1.2856e-04, 1.0653e-04, 1.1339e-04, 1.1155e-04, + 1.0952e-04, 1.2556e-04], device='cuda:0') +2023-04-27 14:14:29,829 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.150e+02 1.660e+02 1.972e+02 2.446e+02 4.506e+02, threshold=3.944e+02, percent-clipped=2.0 +2023-04-27 14:14:37,784 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3626, 1.5044, 1.4870, 1.6165, 1.5970, 1.7542, 1.3904, 3.3644], + device='cuda:0'), covar=tensor([0.0594, 0.0830, 0.0756, 0.1238, 0.0617, 0.0507, 0.0754, 0.0148], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 14:14:58,620 INFO [finetune.py:976] (0/7) Epoch 20, batch 3050, loss[loss=0.1756, simple_loss=0.2418, pruned_loss=0.0547, over 4867.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.25, pruned_loss=0.05252, over 958072.19 frames. ], batch size: 31, lr: 3.24e-03, grad_scale: 64.0 +2023-04-27 14:15:01,113 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=111881.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:15:15,718 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8319, 1.4136, 1.8943, 2.2840, 1.9405, 1.7846, 1.8395, 1.8249], + device='cuda:0'), covar=tensor([0.4307, 0.6517, 0.6416, 0.5940, 0.5699, 0.7428, 0.7631, 0.8869], + device='cuda:0'), in_proj_covar=tensor([0.0429, 0.0411, 0.0506, 0.0509, 0.0457, 0.0486, 0.0493, 0.0497], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:15:37,735 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6413, 1.1831, 4.3337, 4.0911, 3.7907, 4.0553, 3.9352, 3.8494], + device='cuda:0'), covar=tensor([0.7191, 0.6110, 0.0981, 0.1451, 0.1103, 0.1700, 0.2105, 0.1438], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0309, 0.0409, 0.0409, 0.0354, 0.0411, 0.0317, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:15:39,204 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-04-27 14:15:48,558 INFO [finetune.py:976] (0/7) Epoch 20, batch 3100, loss[loss=0.128, simple_loss=0.2063, pruned_loss=0.02489, over 4768.00 frames. ], tot_loss[loss=0.1763, simple_loss=0.2484, pruned_loss=0.0521, over 957190.35 frames. ], batch size: 28, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:15:50,330 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=111929.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:15:52,207 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8147, 1.6520, 1.4168, 1.7397, 2.1122, 1.7158, 1.4787, 1.3077], + device='cuda:0'), covar=tensor([0.1458, 0.1125, 0.1781, 0.1025, 0.0723, 0.1333, 0.1743, 0.1990], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0309, 0.0349, 0.0288, 0.0325, 0.0306, 0.0299, 0.0369], + device='cuda:0'), out_proj_covar=tensor([6.3505e-05, 6.4028e-05, 7.3963e-05, 5.8112e-05, 6.7279e-05, 6.4163e-05, + 6.2741e-05, 7.8451e-05], device='cuda:0') +2023-04-27 14:16:16,416 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.458e+02 1.721e+02 2.109e+02 3.576e+02, threshold=3.441e+02, percent-clipped=0.0 +2023-04-27 14:16:27,780 INFO [finetune.py:976] (0/7) Epoch 20, batch 3150, loss[loss=0.1759, simple_loss=0.2466, pruned_loss=0.05255, over 4768.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.245, pruned_loss=0.05085, over 958122.25 frames. ], batch size: 28, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:16:43,379 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-112000.pt +2023-04-27 14:17:02,006 INFO [finetune.py:976] (0/7) Epoch 20, batch 3200, loss[loss=0.223, simple_loss=0.2738, pruned_loss=0.08613, over 4260.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2413, pruned_loss=0.04975, over 957696.16 frames. ], batch size: 65, lr: 3.24e-03, grad_scale: 32.0 +2023-04-27 14:17:35,522 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.584e+02 1.831e+02 2.274e+02 4.743e+02, threshold=3.663e+02, percent-clipped=4.0 +2023-04-27 14:17:39,724 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112066.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:17:46,878 INFO [finetune.py:976] (0/7) Epoch 20, batch 3250, loss[loss=0.1565, simple_loss=0.2282, pruned_loss=0.04241, over 4909.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2412, pruned_loss=0.05013, over 958624.53 frames. 
], batch size: 35, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:18:01,702 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3465, 1.2401, 1.5263, 1.5418, 1.2636, 1.0352, 1.1307, 0.6133], + device='cuda:0'), covar=tensor([0.0548, 0.0515, 0.0361, 0.0474, 0.0658, 0.1329, 0.0563, 0.0717], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0069, 0.0068, 0.0068, 0.0076, 0.0097, 0.0074, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 14:18:18,434 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112124.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:18:19,091 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9123, 1.1411, 1.6437, 1.7495, 1.7320, 1.8024, 1.6541, 1.6627], + device='cuda:0'), covar=tensor([0.4019, 0.5124, 0.4412, 0.4326, 0.5195, 0.6840, 0.4707, 0.4412], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0374, 0.0323, 0.0336, 0.0346, 0.0394, 0.0357, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:18:20,138 INFO [finetune.py:976] (0/7) Epoch 20, batch 3300, loss[loss=0.1575, simple_loss=0.2362, pruned_loss=0.03944, over 4765.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.244, pruned_loss=0.0507, over 957989.26 frames. ], batch size: 54, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:18:20,258 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112127.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:18:44,826 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112147.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 14:18:46,147 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-27 14:18:52,672 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.675e+01 1.599e+02 1.882e+02 2.331e+02 3.289e+02, threshold=3.764e+02, percent-clipped=0.0 +2023-04-27 14:19:00,546 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=112172.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:19:04,159 INFO [finetune.py:976] (0/7) Epoch 20, batch 3350, loss[loss=0.1875, simple_loss=0.264, pruned_loss=0.05549, over 4902.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2472, pruned_loss=0.05185, over 959211.90 frames. ], batch size: 36, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:19:37,509 INFO [finetune.py:976] (0/7) Epoch 20, batch 3400, loss[loss=0.1844, simple_loss=0.2423, pruned_loss=0.06323, over 4123.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2492, pruned_loss=0.05245, over 957096.48 frames. ], batch size: 65, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:19:37,693 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 14:20:15,438 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.600e+02 1.895e+02 2.268e+02 5.639e+02, threshold=3.789e+02, percent-clipped=3.0 +2023-04-27 14:20:37,464 INFO [finetune.py:976] (0/7) Epoch 20, batch 3450, loss[loss=0.1776, simple_loss=0.252, pruned_loss=0.05158, over 4828.00 frames. ], tot_loss[loss=0.1778, simple_loss=0.25, pruned_loss=0.05279, over 956201.57 frames. 
], batch size: 49, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:20:48,939 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2193, 1.3561, 1.7672, 1.8634, 1.7715, 1.8667, 1.8076, 1.8155], + device='cuda:0'), covar=tensor([0.3842, 0.5192, 0.4195, 0.4047, 0.5323, 0.6760, 0.4777, 0.4511], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0372, 0.0321, 0.0334, 0.0344, 0.0392, 0.0355, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:20:50,697 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9034, 2.6361, 1.9714, 1.8082, 1.3578, 1.4234, 2.0918, 1.3488], + device='cuda:0'), covar=tensor([0.1623, 0.1267, 0.1383, 0.1739, 0.2381, 0.1953, 0.0941, 0.1995], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0213, 0.0169, 0.0205, 0.0201, 0.0186, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 14:21:35,779 INFO [finetune.py:976] (0/7) Epoch 20, batch 3500, loss[loss=0.1682, simple_loss=0.232, pruned_loss=0.0522, over 4824.00 frames. ], tot_loss[loss=0.1761, simple_loss=0.2474, pruned_loss=0.05243, over 956339.69 frames. ], batch size: 33, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:21:59,355 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.563e+02 1.868e+02 2.164e+02 4.202e+02, threshold=3.735e+02, percent-clipped=1.0 +2023-04-27 14:22:09,766 INFO [finetune.py:976] (0/7) Epoch 20, batch 3550, loss[loss=0.1777, simple_loss=0.2393, pruned_loss=0.05803, over 4868.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2446, pruned_loss=0.05131, over 957244.60 frames. ], batch size: 34, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:22:15,257 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3641, 1.6697, 1.4895, 1.8478, 1.7466, 1.8892, 1.5339, 3.4789], + device='cuda:0'), covar=tensor([0.0595, 0.0810, 0.0798, 0.1207, 0.0618, 0.0445, 0.0745, 0.0151], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 14:22:18,176 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112389.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:22:40,993 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112422.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:22:44,032 INFO [finetune.py:976] (0/7) Epoch 20, batch 3600, loss[loss=0.1356, simple_loss=0.201, pruned_loss=0.03507, over 4717.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2405, pruned_loss=0.04976, over 956763.55 frames. ], batch size: 23, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:22:52,170 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. 
limit=2.0 +2023-04-27 14:22:57,488 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112447.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 14:22:59,365 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112450.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:23:06,296 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.532e+02 1.802e+02 2.141e+02 3.664e+02, threshold=3.603e+02, percent-clipped=0.0 +2023-04-27 14:23:18,448 INFO [finetune.py:976] (0/7) Epoch 20, batch 3650, loss[loss=0.2088, simple_loss=0.2827, pruned_loss=0.06749, over 4810.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2437, pruned_loss=0.05133, over 954586.13 frames. ], batch size: 40, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:23:30,564 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=112495.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 14:23:58,094 INFO [finetune.py:976] (0/7) Epoch 20, batch 3700, loss[loss=0.2141, simple_loss=0.2789, pruned_loss=0.07467, over 4217.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2475, pruned_loss=0.05223, over 952692.52 frames. ], batch size: 66, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:24:41,300 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7355, 1.1052, 1.3816, 1.4215, 1.7952, 1.4087, 1.1945, 1.3340], + device='cuda:0'), covar=tensor([0.1444, 0.1641, 0.2154, 0.1459, 0.1057, 0.1790, 0.1945, 0.2407], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0313, 0.0353, 0.0290, 0.0329, 0.0309, 0.0303, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.4391e-05, 6.4910e-05, 7.4554e-05, 5.8676e-05, 6.8069e-05, 6.4747e-05, + 6.3441e-05, 7.9429e-05], device='cuda:0') +2023-04-27 14:24:41,781 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.566e+02 1.857e+02 2.073e+02 3.044e+02, threshold=3.713e+02, percent-clipped=0.0 +2023-04-27 14:24:43,126 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112562.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:25:00,439 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7473, 1.6877, 1.9348, 2.0558, 1.6540, 1.4281, 1.7284, 1.0542], + device='cuda:0'), covar=tensor([0.0650, 0.0667, 0.0559, 0.0826, 0.0789, 0.1064, 0.0663, 0.0684], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0069, 0.0068, 0.0069, 0.0076, 0.0097, 0.0074, 0.0066], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 14:25:05,134 INFO [finetune.py:976] (0/7) Epoch 20, batch 3750, loss[loss=0.2139, simple_loss=0.2818, pruned_loss=0.07299, over 4921.00 frames. ], tot_loss[loss=0.1767, simple_loss=0.2484, pruned_loss=0.05249, over 952899.60 frames. ], batch size: 38, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:25:05,265 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112577.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:26:00,348 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112617.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:26:00,469 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.21 vs. 
limit=5.0 +2023-04-27 14:26:10,705 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112623.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:26:18,696 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.5151, 3.3387, 2.6299, 4.0344, 3.3735, 3.4605, 1.5480, 3.4664], + device='cuda:0'), covar=tensor([0.1811, 0.1372, 0.3684, 0.1883, 0.2450, 0.1804, 0.5450, 0.2514], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0215, 0.0252, 0.0305, 0.0297, 0.0247, 0.0274, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:26:19,269 INFO [finetune.py:976] (0/7) Epoch 20, batch 3800, loss[loss=0.1709, simple_loss=0.2455, pruned_loss=0.04817, over 4743.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2494, pruned_loss=0.05221, over 953397.58 frames. ], batch size: 54, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:26:30,571 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 14:26:31,074 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112638.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 14:27:02,953 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.113e+02 1.520e+02 1.863e+02 2.243e+02 5.060e+02, threshold=3.726e+02, percent-clipped=6.0 +2023-04-27 14:27:25,229 INFO [finetune.py:976] (0/7) Epoch 20, batch 3850, loss[loss=0.1811, simple_loss=0.2456, pruned_loss=0.05827, over 4802.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2476, pruned_loss=0.05115, over 953581.68 frames. ], batch size: 45, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:27:25,960 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112678.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 14:28:09,573 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=112722.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:28:09,598 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8271, 1.4205, 1.4624, 1.6757, 2.0147, 1.6130, 1.3800, 1.4390], + device='cuda:0'), covar=tensor([0.1458, 0.1348, 0.1645, 0.1244, 0.0752, 0.1501, 0.1869, 0.2125], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0313, 0.0351, 0.0290, 0.0328, 0.0308, 0.0302, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.4336e-05, 6.4798e-05, 7.4288e-05, 5.8574e-05, 6.7831e-05, 6.4521e-05, + 6.3251e-05, 7.9515e-05], device='cuda:0') +2023-04-27 14:28:13,013 INFO [finetune.py:976] (0/7) Epoch 20, batch 3900, loss[loss=0.1678, simple_loss=0.2316, pruned_loss=0.05195, over 4820.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2455, pruned_loss=0.0513, over 954415.39 frames. ], batch size: 39, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:28:24,996 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112745.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:28:34,454 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.220e+01 1.530e+02 1.887e+02 2.253e+02 4.348e+02, threshold=3.774e+02, percent-clipped=2.0 +2023-04-27 14:28:41,691 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=112770.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:28:45,926 INFO [finetune.py:976] (0/7) Epoch 20, batch 3950, loss[loss=0.184, simple_loss=0.2291, pruned_loss=0.06943, over 4302.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2419, pruned_loss=0.05019, over 954096.87 frames. 
], batch size: 18, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:29:02,239 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2094, 2.2564, 1.8990, 1.9258, 2.3178, 1.7440, 2.8491, 1.6835], + device='cuda:0'), covar=tensor([0.3814, 0.1822, 0.4288, 0.3016, 0.1775, 0.2541, 0.1366, 0.4108], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0348, 0.0425, 0.0354, 0.0380, 0.0375, 0.0372, 0.0418], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:29:20,151 INFO [finetune.py:976] (0/7) Epoch 20, batch 4000, loss[loss=0.1491, simple_loss=0.2255, pruned_loss=0.03634, over 4907.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2412, pruned_loss=0.04994, over 950162.63 frames. ], batch size: 43, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:29:41,598 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5486, 1.7944, 1.9398, 2.0210, 1.8503, 1.9028, 1.9867, 1.9977], + device='cuda:0'), covar=tensor([0.4362, 0.6089, 0.4857, 0.4706, 0.5805, 0.7352, 0.6019, 0.5087], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0373, 0.0321, 0.0334, 0.0344, 0.0394, 0.0355, 0.0326], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:29:42,039 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.029e+02 1.556e+02 1.866e+02 2.319e+02 5.001e+02, threshold=3.732e+02, percent-clipped=1.0 +2023-04-27 14:29:48,009 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 14:29:53,758 INFO [finetune.py:976] (0/7) Epoch 20, batch 4050, loss[loss=0.1912, simple_loss=0.2628, pruned_loss=0.05983, over 4831.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2444, pruned_loss=0.05113, over 950690.32 frames. ], batch size: 49, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:30:03,647 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6986, 3.6580, 2.7448, 4.2660, 3.6713, 3.6010, 1.5986, 3.6859], + device='cuda:0'), covar=tensor([0.1695, 0.1247, 0.4017, 0.1428, 0.3572, 0.1840, 0.5747, 0.2366], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0216, 0.0252, 0.0307, 0.0297, 0.0248, 0.0275, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:30:20,602 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7473, 2.1130, 2.0060, 2.1871, 1.9476, 2.0787, 2.0730, 2.0354], + device='cuda:0'), covar=tensor([0.3693, 0.5657, 0.5099, 0.4473, 0.5731, 0.7300, 0.6486, 0.5635], + device='cuda:0'), in_proj_covar=tensor([0.0335, 0.0371, 0.0320, 0.0334, 0.0344, 0.0393, 0.0354, 0.0325], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:30:21,664 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112918.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:30:23,587 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=112921.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:30:27,669 INFO [finetune.py:976] (0/7) Epoch 20, batch 4100, loss[loss=0.1545, simple_loss=0.233, pruned_loss=0.038, over 4860.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2471, pruned_loss=0.0516, over 951886.47 frames. 
], batch size: 34, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:30:31,346 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112933.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 14:31:05,057 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6691, 2.3878, 1.6308, 1.8212, 1.2832, 1.3068, 1.7147, 1.2223], + device='cuda:0'), covar=tensor([0.1908, 0.1277, 0.1653, 0.1614, 0.2477, 0.2146, 0.1085, 0.2172], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0212, 0.0169, 0.0204, 0.0200, 0.0186, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 14:31:11,352 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.236e+02 1.611e+02 1.943e+02 2.315e+02 4.443e+02, threshold=3.885e+02, percent-clipped=3.0 +2023-04-27 14:31:25,619 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=112973.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 14:31:33,459 INFO [finetune.py:976] (0/7) Epoch 20, batch 4150, loss[loss=0.1815, simple_loss=0.248, pruned_loss=0.05754, over 4825.00 frames. ], tot_loss[loss=0.177, simple_loss=0.2491, pruned_loss=0.05249, over 952605.00 frames. ], batch size: 33, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:31:42,877 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=112982.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:32:29,694 INFO [finetune.py:976] (0/7) Epoch 20, batch 4200, loss[loss=0.1988, simple_loss=0.2714, pruned_loss=0.0631, over 4803.00 frames. ], tot_loss[loss=0.1772, simple_loss=0.2493, pruned_loss=0.05256, over 953534.55 frames. ], batch size: 40, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:32:41,752 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113045.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:32:57,028 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.553e+02 1.872e+02 2.220e+02 4.301e+02, threshold=3.745e+02, percent-clipped=1.0 +2023-04-27 14:33:18,842 INFO [finetune.py:976] (0/7) Epoch 20, batch 4250, loss[loss=0.1694, simple_loss=0.234, pruned_loss=0.05236, over 4908.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.247, pruned_loss=0.05193, over 953879.19 frames. ], batch size: 46, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:33:35,093 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=113093.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:34:04,770 INFO [finetune.py:976] (0/7) Epoch 20, batch 4300, loss[loss=0.1422, simple_loss=0.2224, pruned_loss=0.03102, over 4818.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2451, pruned_loss=0.05143, over 955340.14 frames. ], batch size: 39, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:34:27,839 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.100e+02 1.602e+02 1.982e+02 2.267e+02 5.350e+02, threshold=3.963e+02, percent-clipped=3.0 +2023-04-27 14:34:38,150 INFO [finetune.py:976] (0/7) Epoch 20, batch 4350, loss[loss=0.1173, simple_loss=0.1877, pruned_loss=0.02344, over 4728.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2416, pruned_loss=0.05043, over 954691.78 frames. 
], batch size: 23, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:35:06,267 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113218.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:35:11,662 INFO [finetune.py:976] (0/7) Epoch 20, batch 4400, loss[loss=0.2007, simple_loss=0.2813, pruned_loss=0.06004, over 4845.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.243, pruned_loss=0.05142, over 954125.70 frames. ], batch size: 47, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:35:15,423 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113233.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:35:16,017 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113234.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:35:24,430 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 14:35:34,719 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.136e+02 1.671e+02 1.942e+02 2.362e+02 3.791e+02, threshold=3.884e+02, percent-clipped=0.0 +2023-04-27 14:35:38,465 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=113266.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:35:42,676 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113273.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 14:35:45,062 INFO [finetune.py:976] (0/7) Epoch 20, batch 4450, loss[loss=0.1651, simple_loss=0.2521, pruned_loss=0.03908, over 4759.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2457, pruned_loss=0.05198, over 954122.42 frames. ], batch size: 28, lr: 3.23e-03, grad_scale: 32.0 +2023-04-27 14:35:45,129 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113277.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:35:47,575 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=113281.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:35:56,992 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113295.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:36:04,595 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6482, 1.9538, 1.7715, 1.8531, 1.5278, 1.6692, 1.6894, 1.2408], + device='cuda:0'), covar=tensor([0.1813, 0.1347, 0.0746, 0.1254, 0.3104, 0.1193, 0.1865, 0.2421], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0300, 0.0215, 0.0277, 0.0311, 0.0257, 0.0247, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1475e-04, 1.1907e-04, 8.5162e-05, 1.0950e-04, 1.2617e-04, 1.0175e-04, + 1.0006e-04, 1.0301e-04], device='cuda:0') +2023-04-27 14:36:07,974 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6921, 2.7345, 2.0761, 2.4470, 2.7824, 2.3096, 3.6040, 1.8890], + device='cuda:0'), covar=tensor([0.3540, 0.1895, 0.4287, 0.3257, 0.1716, 0.2363, 0.1262, 0.4216], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0344, 0.0420, 0.0349, 0.0375, 0.0369, 0.0367, 0.0412], + device='cuda:0'), out_proj_covar=tensor([9.9779e-05, 1.0297e-04, 1.2760e-04, 1.0544e-04, 1.1184e-04, 1.1020e-04, + 1.0787e-04, 1.2443e-04], device='cuda:0') +2023-04-27 14:36:15,056 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=113321.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:36:18,628 INFO [finetune.py:976] (0/7) Epoch 20, batch 4500, loss[loss=0.1519, simple_loss=0.2326, pruned_loss=0.03556, over 4893.00 frames. 
], tot_loss[loss=0.1757, simple_loss=0.2472, pruned_loss=0.05215, over 952675.32 frames. ], batch size: 36, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:36:21,132 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.1928, 3.1959, 2.4457, 3.7404, 3.2140, 3.2042, 1.6721, 3.2388], + device='cuda:0'), covar=tensor([0.2243, 0.1572, 0.4432, 0.2697, 0.3027, 0.2181, 0.5555, 0.2803], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0216, 0.0251, 0.0307, 0.0296, 0.0248, 0.0275, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:37:03,587 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.082e+01 1.625e+02 1.957e+02 2.322e+02 4.205e+02, threshold=3.914e+02, percent-clipped=1.0 +2023-04-27 14:37:25,449 INFO [finetune.py:976] (0/7) Epoch 20, batch 4550, loss[loss=0.1903, simple_loss=0.2618, pruned_loss=0.05942, over 4916.00 frames. ], tot_loss[loss=0.1775, simple_loss=0.249, pruned_loss=0.053, over 953049.81 frames. ], batch size: 38, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:37:49,672 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.00 vs. limit=5.0 +2023-04-27 14:38:02,717 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2512, 1.6963, 1.5291, 2.1981, 2.3327, 1.9847, 1.9310, 1.6711], + device='cuda:0'), covar=tensor([0.1751, 0.1681, 0.1627, 0.1484, 0.1163, 0.1612, 0.1838, 0.2155], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0314, 0.0352, 0.0290, 0.0328, 0.0308, 0.0302, 0.0375], + device='cuda:0'), out_proj_covar=tensor([6.4325e-05, 6.5051e-05, 7.4380e-05, 5.8552e-05, 6.7711e-05, 6.4590e-05, + 6.3312e-05, 7.9683e-05], device='cuda:0') +2023-04-27 14:38:24,622 INFO [finetune.py:976] (0/7) Epoch 20, batch 4600, loss[loss=0.1373, simple_loss=0.2187, pruned_loss=0.02801, over 4745.00 frames. ], tot_loss[loss=0.1769, simple_loss=0.2485, pruned_loss=0.05262, over 953465.57 frames. ], batch size: 28, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:38:25,336 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9506, 2.3873, 2.1445, 2.1730, 1.6658, 1.9617, 1.9840, 1.4798], + device='cuda:0'), covar=tensor([0.1935, 0.1135, 0.0642, 0.1265, 0.2991, 0.1071, 0.1825, 0.2712], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0300, 0.0215, 0.0276, 0.0311, 0.0256, 0.0248, 0.0261], + device='cuda:0'), out_proj_covar=tensor([1.1492e-04, 1.1896e-04, 8.5169e-05, 1.0930e-04, 1.2604e-04, 1.0135e-04, + 1.0029e-04, 1.0340e-04], device='cuda:0') +2023-04-27 14:39:00,869 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.917e+01 1.466e+02 1.708e+02 2.051e+02 4.040e+02, threshold=3.416e+02, percent-clipped=1.0 +2023-04-27 14:39:13,176 INFO [finetune.py:976] (0/7) Epoch 20, batch 4650, loss[loss=0.1314, simple_loss=0.2074, pruned_loss=0.02772, over 4769.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2456, pruned_loss=0.05177, over 954176.95 frames. ], batch size: 28, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:39:21,894 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. 
limit=2.0 +2023-04-27 14:39:37,907 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1963, 1.9196, 2.3148, 2.5764, 2.2451, 2.0585, 2.1971, 2.1355], + device='cuda:0'), covar=tensor([0.4938, 0.7313, 0.7932, 0.6103, 0.6821, 0.8995, 0.8890, 0.9508], + device='cuda:0'), in_proj_covar=tensor([0.0429, 0.0410, 0.0504, 0.0504, 0.0457, 0.0484, 0.0493, 0.0498], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:39:47,027 INFO [finetune.py:976] (0/7) Epoch 20, batch 4700, loss[loss=0.1358, simple_loss=0.2158, pruned_loss=0.02789, over 4919.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2415, pruned_loss=0.04999, over 955059.72 frames. ], batch size: 43, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:40:01,204 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0789, 2.6148, 2.2342, 2.3386, 1.6820, 2.1336, 2.0791, 1.6276], + device='cuda:0'), covar=tensor([0.2066, 0.1052, 0.0703, 0.1134, 0.3414, 0.1007, 0.2019, 0.2613], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0300, 0.0215, 0.0276, 0.0312, 0.0256, 0.0249, 0.0261], + device='cuda:0'), out_proj_covar=tensor([1.1477e-04, 1.1920e-04, 8.5335e-05, 1.0935e-04, 1.2654e-04, 1.0167e-04, + 1.0054e-04, 1.0354e-04], device='cuda:0') +2023-04-27 14:40:01,914 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-27 14:40:06,056 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. limit=2.0 +2023-04-27 14:40:08,115 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.331e+01 1.541e+02 1.809e+02 2.201e+02 3.607e+02, threshold=3.618e+02, percent-clipped=1.0 +2023-04-27 14:40:18,935 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4975, 1.7402, 1.9454, 2.0215, 1.8756, 1.9850, 2.0287, 2.0203], + device='cuda:0'), covar=tensor([0.3985, 0.5129, 0.4513, 0.4582, 0.5441, 0.7121, 0.5325, 0.4450], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0373, 0.0320, 0.0335, 0.0345, 0.0394, 0.0356, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:40:19,997 INFO [finetune.py:976] (0/7) Epoch 20, batch 4750, loss[loss=0.1412, simple_loss=0.2199, pruned_loss=0.03123, over 4923.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2403, pruned_loss=0.0497, over 955203.18 frames. ], batch size: 38, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:40:20,083 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113577.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:40:29,021 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113590.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:40:46,838 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113617.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:40:52,160 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=113625.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:40:53,787 INFO [finetune.py:976] (0/7) Epoch 20, batch 4800, loss[loss=0.1932, simple_loss=0.2802, pruned_loss=0.05308, over 4856.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2438, pruned_loss=0.05113, over 953775.18 frames. 
], batch size: 44, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:41:06,516 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=113646.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:41:07,137 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5956, 1.2845, 1.2426, 1.3927, 1.7852, 1.3885, 1.2350, 1.1794], + device='cuda:0'), covar=tensor([0.1463, 0.1596, 0.1799, 0.1356, 0.0826, 0.1779, 0.1905, 0.2385], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0311, 0.0350, 0.0287, 0.0324, 0.0306, 0.0300, 0.0371], + device='cuda:0'), out_proj_covar=tensor([6.3681e-05, 6.4486e-05, 7.3900e-05, 5.8074e-05, 6.6963e-05, 6.4204e-05, + 6.2787e-05, 7.8997e-05], device='cuda:0') +2023-04-27 14:41:15,405 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.571e+02 1.898e+02 2.197e+02 3.707e+02, threshold=3.796e+02, percent-clipped=1.0 +2023-04-27 14:41:17,816 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9983, 1.9747, 1.7041, 1.6647, 2.0158, 1.6302, 2.4355, 1.4622], + device='cuda:0'), covar=tensor([0.3307, 0.1673, 0.4295, 0.2730, 0.1577, 0.2220, 0.1373, 0.4302], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0344, 0.0421, 0.0350, 0.0377, 0.0370, 0.0367, 0.0415], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:41:27,086 INFO [finetune.py:976] (0/7) Epoch 20, batch 4850, loss[loss=0.1684, simple_loss=0.2405, pruned_loss=0.04818, over 4759.00 frames. ], tot_loss[loss=0.176, simple_loss=0.2471, pruned_loss=0.05241, over 952917.69 frames. ], batch size: 28, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:41:27,788 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113678.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:41:46,446 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=113707.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:42:00,310 INFO [finetune.py:976] (0/7) Epoch 20, batch 4900, loss[loss=0.1964, simple_loss=0.2732, pruned_loss=0.05987, over 4830.00 frames. ], tot_loss[loss=0.1765, simple_loss=0.2486, pruned_loss=0.05222, over 952058.63 frames. ], batch size: 39, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:42:19,436 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.56 vs. limit=2.0 +2023-04-27 14:42:27,122 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.699e+02 1.953e+02 2.350e+02 6.319e+02, threshold=3.906e+02, percent-clipped=4.0 +2023-04-27 14:42:38,975 INFO [finetune.py:976] (0/7) Epoch 20, batch 4950, loss[loss=0.1776, simple_loss=0.2544, pruned_loss=0.05042, over 4839.00 frames. ], tot_loss[loss=0.1771, simple_loss=0.2494, pruned_loss=0.05245, over 952000.08 frames. ], batch size: 47, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:43:30,853 INFO [finetune.py:976] (0/7) Epoch 20, batch 5000, loss[loss=0.1839, simple_loss=0.2516, pruned_loss=0.05814, over 4927.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2474, pruned_loss=0.05126, over 953419.20 frames. ], batch size: 38, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:44:03,534 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=2.00 vs. 
limit=2.0 +2023-04-27 14:44:14,003 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.052e+02 1.541e+02 1.777e+02 2.201e+02 3.232e+02, threshold=3.554e+02, percent-clipped=0.0 +2023-04-27 14:44:23,572 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.4766, 1.4318, 1.4404, 1.0434, 1.3593, 1.2277, 1.7263, 1.3058], + device='cuda:0'), covar=tensor([0.3697, 0.1754, 0.5072, 0.2605, 0.1685, 0.2197, 0.1583, 0.4915], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0347, 0.0424, 0.0352, 0.0380, 0.0373, 0.0370, 0.0418], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:44:32,033 INFO [finetune.py:976] (0/7) Epoch 20, batch 5050, loss[loss=0.1489, simple_loss=0.2166, pruned_loss=0.04061, over 4846.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2443, pruned_loss=0.05041, over 954999.89 frames. ], batch size: 49, lr: 3.22e-03, grad_scale: 32.0 +2023-04-27 14:44:52,875 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=113890.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:45:21,463 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.60 vs. limit=2.0 +2023-04-27 14:45:34,213 INFO [finetune.py:976] (0/7) Epoch 20, batch 5100, loss[loss=0.1707, simple_loss=0.2471, pruned_loss=0.04709, over 4901.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2409, pruned_loss=0.04928, over 954832.34 frames. ], batch size: 35, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:45:42,065 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=113938.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:45:45,739 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-27 14:45:50,957 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.10 vs. limit=5.0 +2023-04-27 14:45:57,340 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.243e+01 1.576e+02 1.837e+02 2.238e+02 4.174e+02, threshold=3.673e+02, percent-clipped=2.0 +2023-04-27 14:46:05,379 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=113973.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:46:08,236 INFO [finetune.py:976] (0/7) Epoch 20, batch 5150, loss[loss=0.1375, simple_loss=0.2181, pruned_loss=0.02842, over 4760.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2411, pruned_loss=0.04981, over 955820.09 frames. ], batch size: 27, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:46:24,884 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-114000.pt +2023-04-27 14:46:27,889 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114002.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:46:27,972 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7713, 1.4753, 1.8499, 2.1959, 1.9014, 1.7168, 1.7970, 1.7801], + device='cuda:0'), covar=tensor([0.3700, 0.5619, 0.4781, 0.4476, 0.4706, 0.6343, 0.6484, 0.7017], + device='cuda:0'), in_proj_covar=tensor([0.0431, 0.0413, 0.0508, 0.0509, 0.0460, 0.0488, 0.0497, 0.0502], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:46:43,543 INFO [finetune.py:976] (0/7) Epoch 20, batch 5200, loss[loss=0.2038, simple_loss=0.2722, pruned_loss=0.06774, over 4782.00 frames. 
], tot_loss[loss=0.1743, simple_loss=0.2454, pruned_loss=0.05161, over 953614.35 frames. ], batch size: 29, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:47:06,606 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.860e+01 1.682e+02 1.942e+02 2.331e+02 4.447e+02, threshold=3.884e+02, percent-clipped=1.0 +2023-04-27 14:47:16,856 INFO [finetune.py:976] (0/7) Epoch 20, batch 5250, loss[loss=0.2207, simple_loss=0.2871, pruned_loss=0.07715, over 4845.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2468, pruned_loss=0.05219, over 952683.19 frames. ], batch size: 44, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:47:27,544 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6222, 2.0528, 1.8184, 1.9442, 1.4593, 1.7784, 1.6939, 1.3491], + device='cuda:0'), covar=tensor([0.1774, 0.0987, 0.0769, 0.1044, 0.3261, 0.0984, 0.1775, 0.2312], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0300, 0.0216, 0.0277, 0.0312, 0.0256, 0.0248, 0.0262], + device='cuda:0'), out_proj_covar=tensor([1.1458e-04, 1.1928e-04, 8.5568e-05, 1.0957e-04, 1.2671e-04, 1.0171e-04, + 1.0047e-04, 1.0364e-04], device='cuda:0') +2023-04-27 14:47:50,695 INFO [finetune.py:976] (0/7) Epoch 20, batch 5300, loss[loss=0.1277, simple_loss=0.2031, pruned_loss=0.02614, over 4829.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2474, pruned_loss=0.05192, over 953319.32 frames. ], batch size: 25, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:47:50,810 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114127.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:48:13,280 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.642e+01 1.650e+02 1.908e+02 2.204e+02 4.976e+02, threshold=3.816e+02, percent-clipped=2.0 +2023-04-27 14:48:24,237 INFO [finetune.py:976] (0/7) Epoch 20, batch 5350, loss[loss=0.1567, simple_loss=0.23, pruned_loss=0.04167, over 4708.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2459, pruned_loss=0.05056, over 950727.10 frames. ], batch size: 23, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:48:31,431 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114188.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:48:40,438 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.5361, 3.4879, 2.5842, 4.1380, 3.5400, 3.5503, 1.4524, 3.5687], + device='cuda:0'), covar=tensor([0.1906, 0.1248, 0.3388, 0.2193, 0.2737, 0.2005, 0.5887, 0.2592], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0216, 0.0253, 0.0306, 0.0296, 0.0247, 0.0275, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:48:58,068 INFO [finetune.py:976] (0/7) Epoch 20, batch 5400, loss[loss=0.1702, simple_loss=0.2371, pruned_loss=0.05169, over 4767.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2442, pruned_loss=0.05034, over 952232.90 frames. 
], batch size: 27, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:49:33,868 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.776e+01 1.553e+02 1.821e+02 2.286e+02 4.099e+02, threshold=3.642e+02, percent-clipped=1.0 +2023-04-27 14:49:37,011 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1113, 1.7966, 2.3267, 2.5527, 2.1539, 2.0149, 2.1427, 2.1405], + device='cuda:0'), covar=tensor([0.4986, 0.7370, 0.7367, 0.5872, 0.6196, 0.8737, 0.9321, 0.8780], + device='cuda:0'), in_proj_covar=tensor([0.0430, 0.0412, 0.0506, 0.0507, 0.0459, 0.0488, 0.0495, 0.0499], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:49:53,915 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114273.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:49:56,281 INFO [finetune.py:976] (0/7) Epoch 20, batch 5450, loss[loss=0.1591, simple_loss=0.2359, pruned_loss=0.04113, over 4764.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2419, pruned_loss=0.04967, over 952835.46 frames. ], batch size: 59, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:50:28,200 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114302.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:50:58,434 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=114321.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:51:02,108 INFO [finetune.py:976] (0/7) Epoch 20, batch 5500, loss[loss=0.1346, simple_loss=0.2061, pruned_loss=0.03159, over 4761.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2405, pruned_loss=0.04962, over 953244.87 frames. ], batch size: 26, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:51:02,844 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1959, 1.5880, 1.3494, 1.7044, 1.6244, 1.9386, 1.3755, 3.5465], + device='cuda:0'), covar=tensor([0.0597, 0.0814, 0.0810, 0.1235, 0.0630, 0.0529, 0.0776, 0.0157], + device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0040, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 14:51:20,963 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9982, 2.3362, 0.9883, 1.2432, 1.6867, 1.0936, 2.5135, 1.3579], + device='cuda:0'), covar=tensor([0.0682, 0.0491, 0.0606, 0.1340, 0.0436, 0.1087, 0.0332, 0.0743], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0074, 0.0052], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 14:51:22,115 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=114350.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:51:28,689 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.538e+02 1.938e+02 2.403e+02 5.552e+02, threshold=3.877e+02, percent-clipped=2.0 +2023-04-27 14:51:41,082 INFO [finetune.py:976] (0/7) Epoch 20, batch 5550, loss[loss=0.1807, simple_loss=0.2649, pruned_loss=0.04827, over 4837.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2427, pruned_loss=0.05057, over 951260.92 frames. ], batch size: 47, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:52:13,272 INFO [finetune.py:976] (0/7) Epoch 20, batch 5600, loss[loss=0.1811, simple_loss=0.261, pruned_loss=0.05057, over 4924.00 frames. 
], tot_loss[loss=0.1746, simple_loss=0.246, pruned_loss=0.05158, over 950433.86 frames. ], batch size: 42, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:52:32,512 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.241e+02 1.607e+02 1.931e+02 2.416e+02 4.718e+02, threshold=3.861e+02, percent-clipped=3.0 +2023-04-27 14:52:42,452 INFO [finetune.py:976] (0/7) Epoch 20, batch 5650, loss[loss=0.1712, simple_loss=0.2408, pruned_loss=0.05077, over 4854.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2474, pruned_loss=0.0513, over 951946.06 frames. ], batch size: 31, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:52:46,419 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=114483.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:53:12,740 INFO [finetune.py:976] (0/7) Epoch 20, batch 5700, loss[loss=0.1565, simple_loss=0.2208, pruned_loss=0.04612, over 4189.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2444, pruned_loss=0.05103, over 939128.58 frames. ], batch size: 18, lr: 3.22e-03, grad_scale: 64.0 +2023-04-27 14:53:29,352 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-20.pt +2023-04-27 14:53:39,071 INFO [finetune.py:976] (0/7) Epoch 21, batch 0, loss[loss=0.1354, simple_loss=0.2133, pruned_loss=0.02875, over 4701.00 frames. ], tot_loss[loss=0.1354, simple_loss=0.2133, pruned_loss=0.02875, over 4701.00 frames. ], batch size: 23, lr: 3.21e-03, grad_scale: 64.0 +2023-04-27 14:53:39,072 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 14:53:46,027 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3981, 1.3926, 3.8236, 3.5454, 3.4573, 3.6641, 3.7843, 3.3821], + device='cuda:0'), covar=tensor([0.6756, 0.5289, 0.1321, 0.2132, 0.1167, 0.1392, 0.0812, 0.1589], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0306, 0.0403, 0.0403, 0.0347, 0.0407, 0.0311, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:53:56,092 INFO [finetune.py:1010] (0/7) Epoch 21, validation: loss=0.1544, simple_loss=0.2245, pruned_loss=0.04212, over 2265189.00 frames. 
+2023-04-27 14:53:56,092 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 14:54:02,564 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 7.383e+01 1.462e+02 1.753e+02 2.110e+02 4.375e+02, threshold=3.507e+02, percent-clipped=1.0 +2023-04-27 14:54:13,501 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9110, 1.4001, 1.6645, 1.7606, 1.6383, 1.4037, 0.8488, 1.3839], + device='cuda:0'), covar=tensor([0.2912, 0.3000, 0.1602, 0.1950, 0.2314, 0.2500, 0.4170, 0.1981], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0246, 0.0227, 0.0313, 0.0221, 0.0233, 0.0227, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 14:54:20,084 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7649, 1.2818, 1.8573, 2.2934, 1.8795, 1.7500, 1.8233, 1.8118], + device='cuda:0'), covar=tensor([0.4916, 0.7027, 0.6621, 0.5676, 0.5870, 0.8313, 0.8570, 0.9110], + device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0410, 0.0503, 0.0503, 0.0455, 0.0484, 0.0493, 0.0496], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 14:54:25,506 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1757, 2.8042, 2.1618, 2.3302, 1.5600, 1.5276, 2.3258, 1.4751], + device='cuda:0'), covar=tensor([0.1716, 0.1548, 0.1361, 0.1621, 0.2458, 0.2020, 0.1027, 0.2066], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0210, 0.0169, 0.0203, 0.0200, 0.0185, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 14:54:37,746 INFO [finetune.py:976] (0/7) Epoch 21, batch 50, loss[loss=0.1553, simple_loss=0.2226, pruned_loss=0.04397, over 4809.00 frames. ], tot_loss[loss=0.1779, simple_loss=0.2482, pruned_loss=0.05377, over 214631.19 frames. ], batch size: 41, lr: 3.21e-03, grad_scale: 64.0 +2023-04-27 14:55:05,754 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1729, 1.6459, 1.9924, 2.2266, 1.9644, 1.6147, 1.0326, 1.7490], + device='cuda:0'), covar=tensor([0.3075, 0.3047, 0.1579, 0.2006, 0.2548, 0.2468, 0.4038, 0.1867], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0245, 0.0226, 0.0312, 0.0220, 0.0232, 0.0226, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 14:55:16,596 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.0462, 3.9021, 2.7204, 4.6412, 4.0667, 3.9908, 1.9553, 3.9484], + device='cuda:0'), covar=tensor([0.1616, 0.1074, 0.3195, 0.1336, 0.2690, 0.1666, 0.5199, 0.2400], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0215, 0.0252, 0.0306, 0.0296, 0.0247, 0.0275, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 14:55:37,893 INFO [finetune.py:976] (0/7) Epoch 21, batch 100, loss[loss=0.1923, simple_loss=0.2509, pruned_loss=0.06684, over 4379.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2395, pruned_loss=0.04971, over 379379.83 frames. 
], batch size: 19, lr: 3.21e-03, grad_scale: 64.0 +2023-04-27 14:55:48,246 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.033e+02 1.526e+02 1.765e+02 2.101e+02 5.147e+02, threshold=3.531e+02, percent-clipped=4.0 +2023-04-27 14:56:44,431 INFO [finetune.py:976] (0/7) Epoch 21, batch 150, loss[loss=0.1858, simple_loss=0.2511, pruned_loss=0.06027, over 4917.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2359, pruned_loss=0.04946, over 505662.38 frames. ], batch size: 36, lr: 3.21e-03, grad_scale: 64.0 +2023-04-27 14:56:57,788 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114716.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:57:02,149 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.40 vs. limit=5.0 +2023-04-27 14:57:22,715 INFO [finetune.py:976] (0/7) Epoch 21, batch 200, loss[loss=0.1833, simple_loss=0.2589, pruned_loss=0.05388, over 4818.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.235, pruned_loss=0.04866, over 605934.36 frames. ], batch size: 39, lr: 3.21e-03, grad_scale: 64.0 +2023-04-27 14:57:26,740 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.480e+02 1.757e+02 1.981e+02 3.579e+02, threshold=3.513e+02, percent-clipped=1.0 +2023-04-27 14:57:28,013 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4056, 1.4069, 1.8314, 1.7703, 1.3549, 1.2570, 1.4610, 0.9860], + device='cuda:0'), covar=tensor([0.0608, 0.0671, 0.0351, 0.0620, 0.0754, 0.1051, 0.0558, 0.0591], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0069, 0.0067, 0.0067, 0.0075, 0.0096, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 14:57:38,721 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114777.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:57:42,330 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=114783.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:57:56,200 INFO [finetune.py:976] (0/7) Epoch 21, batch 250, loss[loss=0.1825, simple_loss=0.2512, pruned_loss=0.05689, over 4830.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2406, pruned_loss=0.04982, over 685150.88 frames. ], batch size: 30, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 14:58:12,308 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1229, 0.7844, 0.9695, 0.8122, 1.2426, 1.0039, 0.9340, 0.9790], + device='cuda:0'), covar=tensor([0.1721, 0.1520, 0.2073, 0.1718, 0.1098, 0.1417, 0.1627, 0.2422], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0313, 0.0351, 0.0290, 0.0327, 0.0309, 0.0303, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.4143e-05, 6.4905e-05, 7.4186e-05, 5.8575e-05, 6.7459e-05, 6.4820e-05, + 6.3520e-05, 7.9509e-05], device='cuda:0') +2023-04-27 14:58:15,121 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=114831.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:58:30,015 INFO [finetune.py:976] (0/7) Epoch 21, batch 300, loss[loss=0.2019, simple_loss=0.2809, pruned_loss=0.0615, over 4829.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2435, pruned_loss=0.05003, over 746588.87 frames. 
], batch size: 47, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 14:58:34,664 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.745e+01 1.667e+02 1.887e+02 2.264e+02 4.948e+02, threshold=3.774e+02, percent-clipped=2.0 +2023-04-27 14:58:42,278 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=114871.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:59:03,088 INFO [finetune.py:976] (0/7) Epoch 21, batch 350, loss[loss=0.1619, simple_loss=0.2428, pruned_loss=0.04048, over 4918.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2459, pruned_loss=0.05106, over 787666.38 frames. ], batch size: 38, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 14:59:22,182 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=114932.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 14:59:32,270 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6825, 3.3298, 2.7484, 3.1331, 2.3113, 2.8967, 2.9612, 2.2666], + device='cuda:0'), covar=tensor([0.2134, 0.1226, 0.0819, 0.1111, 0.3319, 0.1174, 0.1962, 0.2986], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0298, 0.0214, 0.0274, 0.0309, 0.0253, 0.0246, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1338e-04, 1.1853e-04, 8.4804e-05, 1.0851e-04, 1.2547e-04, 1.0004e-04, + 9.9484e-05, 1.0321e-04], device='cuda:0') +2023-04-27 14:59:36,225 INFO [finetune.py:976] (0/7) Epoch 21, batch 400, loss[loss=0.1538, simple_loss=0.2324, pruned_loss=0.0376, over 4809.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.248, pruned_loss=0.0509, over 827469.47 frames. ], batch size: 25, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 14:59:40,904 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.017e+02 1.660e+02 1.983e+02 2.165e+02 4.861e+02, threshold=3.966e+02, percent-clipped=1.0 +2023-04-27 15:00:10,135 INFO [finetune.py:976] (0/7) Epoch 21, batch 450, loss[loss=0.1402, simple_loss=0.2072, pruned_loss=0.03658, over 4821.00 frames. ], tot_loss[loss=0.1742, simple_loss=0.2471, pruned_loss=0.05063, over 855334.75 frames. ], batch size: 33, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 15:00:19,946 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-27 15:00:59,032 INFO [finetune.py:976] (0/7) Epoch 21, batch 500, loss[loss=0.1486, simple_loss=0.2213, pruned_loss=0.03788, over 4786.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2452, pruned_loss=0.05017, over 879618.21 frames. ], batch size: 29, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 15:01:08,724 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. 
limit=2.0 +2023-04-27 15:01:09,614 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.974e+01 1.571e+02 1.812e+02 2.235e+02 3.288e+02, threshold=3.623e+02, percent-clipped=0.0 +2023-04-27 15:01:16,896 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115072.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:01:24,112 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6233, 1.5736, 0.8006, 1.3275, 1.7663, 1.5212, 1.4256, 1.4778], + device='cuda:0'), covar=tensor([0.0497, 0.0385, 0.0349, 0.0558, 0.0269, 0.0499, 0.0490, 0.0560], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 15:01:32,222 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115086.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:01:33,479 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-27 15:01:54,523 INFO [finetune.py:976] (0/7) Epoch 21, batch 550, loss[loss=0.1522, simple_loss=0.2146, pruned_loss=0.04487, over 4796.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2426, pruned_loss=0.04999, over 897029.14 frames. ], batch size: 25, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 15:02:18,291 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.66 vs. limit=2.0 +2023-04-27 15:02:53,389 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115147.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:03:02,662 INFO [finetune.py:976] (0/7) Epoch 21, batch 600, loss[loss=0.1922, simple_loss=0.2597, pruned_loss=0.0623, over 4088.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2438, pruned_loss=0.05056, over 908834.79 frames. ], batch size: 65, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 15:03:12,532 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.066e+02 1.494e+02 1.729e+02 2.315e+02 4.392e+02, threshold=3.458e+02, percent-clipped=4.0 +2023-04-27 15:04:02,672 INFO [finetune.py:976] (0/7) Epoch 21, batch 650, loss[loss=0.1352, simple_loss=0.2087, pruned_loss=0.03088, over 4784.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.246, pruned_loss=0.05089, over 920538.76 frames. ], batch size: 26, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 15:04:17,239 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7270, 2.1495, 1.8340, 2.0267, 1.6460, 1.8634, 1.7114, 1.4065], + device='cuda:0'), covar=tensor([0.1848, 0.1141, 0.0827, 0.1198, 0.3245, 0.1218, 0.1930, 0.2288], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0301, 0.0216, 0.0277, 0.0312, 0.0255, 0.0249, 0.0263], + device='cuda:0'), out_proj_covar=tensor([1.1481e-04, 1.1966e-04, 8.5668e-05, 1.0961e-04, 1.2633e-04, 1.0100e-04, + 1.0062e-04, 1.0422e-04], device='cuda:0') +2023-04-27 15:04:17,775 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115227.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:04:36,594 INFO [finetune.py:976] (0/7) Epoch 21, batch 700, loss[loss=0.2147, simple_loss=0.2862, pruned_loss=0.07165, over 4919.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2468, pruned_loss=0.05088, over 927478.18 frames. 
], batch size: 38, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 15:04:40,861 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.165e+02 1.680e+02 1.934e+02 2.254e+02 3.960e+02, threshold=3.868e+02, percent-clipped=2.0 +2023-04-27 15:04:44,479 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1610, 1.7967, 2.0606, 2.4532, 2.5021, 1.9895, 1.6440, 2.2519], + device='cuda:0'), covar=tensor([0.0873, 0.1175, 0.0773, 0.0616, 0.0660, 0.0991, 0.0807, 0.0554], + device='cuda:0'), in_proj_covar=tensor([0.0189, 0.0203, 0.0185, 0.0172, 0.0178, 0.0181, 0.0153, 0.0179], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:05:08,182 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0726, 2.4670, 1.0102, 1.5197, 1.8039, 1.1566, 3.3078, 1.8150], + device='cuda:0'), covar=tensor([0.0684, 0.0566, 0.0742, 0.1134, 0.0497, 0.1056, 0.0258, 0.0588], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 15:05:08,844 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5556, 2.1440, 2.5861, 3.1059, 2.4863, 2.0796, 2.0006, 2.5536], + device='cuda:0'), covar=tensor([0.3429, 0.3182, 0.1705, 0.2554, 0.2966, 0.2767, 0.3734, 0.2015], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0244, 0.0225, 0.0312, 0.0219, 0.0232, 0.0225, 0.0183], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 15:05:10,549 INFO [finetune.py:976] (0/7) Epoch 21, batch 750, loss[loss=0.1917, simple_loss=0.2795, pruned_loss=0.0519, over 4810.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2468, pruned_loss=0.05054, over 933216.81 frames. ], batch size: 45, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 15:05:38,645 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8379, 1.6912, 1.9186, 2.2224, 2.2619, 1.7139, 1.4781, 1.9726], + device='cuda:0'), covar=tensor([0.0899, 0.1184, 0.0721, 0.0596, 0.0615, 0.0957, 0.0861, 0.0595], + device='cuda:0'), in_proj_covar=tensor([0.0188, 0.0202, 0.0184, 0.0172, 0.0177, 0.0180, 0.0152, 0.0179], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:05:44,407 INFO [finetune.py:976] (0/7) Epoch 21, batch 800, loss[loss=0.1528, simple_loss=0.2289, pruned_loss=0.03829, over 4784.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2446, pruned_loss=0.04952, over 936295.86 frames. ], batch size: 25, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 15:05:48,601 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.653e+01 1.488e+02 1.739e+02 2.068e+02 3.121e+02, threshold=3.478e+02, percent-clipped=0.0 +2023-04-27 15:05:54,622 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115370.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:05:55,244 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115371.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:05:55,831 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115372.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:06:04,312 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. 
limit=2.0 +2023-04-27 15:06:18,145 INFO [finetune.py:976] (0/7) Epoch 21, batch 850, loss[loss=0.1732, simple_loss=0.24, pruned_loss=0.05324, over 4794.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2425, pruned_loss=0.04892, over 939833.06 frames. ], batch size: 51, lr: 3.21e-03, grad_scale: 32.0 +2023-04-27 15:06:20,153 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-27 15:06:26,158 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6594, 2.1021, 1.7933, 2.0745, 1.5685, 1.8470, 1.7254, 1.3294], + device='cuda:0'), covar=tensor([0.1974, 0.1151, 0.0877, 0.1074, 0.3266, 0.0994, 0.1818, 0.2219], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0299, 0.0215, 0.0275, 0.0309, 0.0253, 0.0247, 0.0261], + device='cuda:0'), out_proj_covar=tensor([1.1379e-04, 1.1863e-04, 8.5074e-05, 1.0862e-04, 1.2523e-04, 1.0012e-04, + 9.9763e-05, 1.0359e-04], device='cuda:0') +2023-04-27 15:06:27,914 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=115420.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:06:35,533 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115431.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:06:36,163 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115432.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:06:42,640 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115442.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:07:01,736 INFO [finetune.py:976] (0/7) Epoch 21, batch 900, loss[loss=0.1436, simple_loss=0.2161, pruned_loss=0.0356, over 4835.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2393, pruned_loss=0.04768, over 943453.24 frames. ], batch size: 33, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:07:06,006 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.637e+01 1.508e+02 1.757e+02 2.120e+02 3.037e+02, threshold=3.515e+02, percent-clipped=0.0 +2023-04-27 15:07:34,516 INFO [finetune.py:976] (0/7) Epoch 21, batch 950, loss[loss=0.1321, simple_loss=0.2187, pruned_loss=0.0227, over 4813.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2397, pruned_loss=0.0487, over 946225.66 frames. 
], batch size: 45, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:07:55,172 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2518, 1.3976, 1.3551, 1.6287, 1.4579, 1.6771, 1.2702, 3.0011], + device='cuda:0'), covar=tensor([0.0608, 0.0867, 0.0814, 0.1227, 0.0686, 0.0547, 0.0801, 0.0195], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 15:07:58,920 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6152, 2.0962, 1.8069, 2.0533, 1.5767, 1.7583, 1.6996, 1.3941], + device='cuda:0'), covar=tensor([0.1790, 0.1049, 0.0822, 0.1093, 0.3408, 0.1113, 0.1808, 0.2213], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0300, 0.0215, 0.0276, 0.0311, 0.0254, 0.0248, 0.0262], + device='cuda:0'), out_proj_covar=tensor([1.1431e-04, 1.1896e-04, 8.5228e-05, 1.0916e-04, 1.2588e-04, 1.0051e-04, + 1.0011e-04, 1.0401e-04], device='cuda:0') +2023-04-27 15:08:06,309 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115527.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:08:08,674 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8828, 2.0752, 2.1436, 2.2864, 2.0565, 2.0708, 2.2482, 2.1821], + device='cuda:0'), covar=tensor([0.4838, 0.6803, 0.5313, 0.4691, 0.5909, 0.7753, 0.5949, 0.5572], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0374, 0.0321, 0.0335, 0.0345, 0.0393, 0.0356, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 15:08:39,541 INFO [finetune.py:976] (0/7) Epoch 21, batch 1000, loss[loss=0.2064, simple_loss=0.2731, pruned_loss=0.06988, over 4819.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2421, pruned_loss=0.04955, over 948739.50 frames. ], batch size: 38, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:08:49,082 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.133e+02 1.678e+02 2.025e+02 2.586e+02 4.511e+02, threshold=4.050e+02, percent-clipped=4.0 +2023-04-27 15:09:08,287 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=115575.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:09:45,484 INFO [finetune.py:976] (0/7) Epoch 21, batch 1050, loss[loss=0.1236, simple_loss=0.1966, pruned_loss=0.02532, over 4781.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2453, pruned_loss=0.05062, over 951104.43 frames. ], batch size: 26, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:10:18,959 INFO [finetune.py:976] (0/7) Epoch 21, batch 1100, loss[loss=0.2015, simple_loss=0.2676, pruned_loss=0.06769, over 4839.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2459, pruned_loss=0.05083, over 951025.69 frames. ], batch size: 49, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:10:23,752 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.551e+02 1.797e+02 2.277e+02 4.571e+02, threshold=3.594e+02, percent-clipped=1.0 +2023-04-27 15:10:52,442 INFO [finetune.py:976] (0/7) Epoch 21, batch 1150, loss[loss=0.1238, simple_loss=0.1857, pruned_loss=0.03099, over 4315.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2459, pruned_loss=0.05065, over 950638.62 frames. 
], batch size: 19, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:11:07,901 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115726.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:11:08,523 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=115727.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:11:18,206 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=115742.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:11:18,250 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1410, 2.8255, 2.1511, 2.2279, 1.6494, 1.5166, 2.3218, 1.5150], + device='cuda:0'), covar=tensor([0.1697, 0.1448, 0.1412, 0.1755, 0.2238, 0.2055, 0.0984, 0.2053], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0210, 0.0168, 0.0203, 0.0199, 0.0183, 0.0155, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 15:11:22,973 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5705, 2.0672, 2.4371, 2.9901, 2.3747, 1.9095, 1.8594, 2.4214], + device='cuda:0'), covar=tensor([0.3276, 0.3041, 0.1670, 0.2449, 0.2710, 0.2691, 0.3736, 0.2011], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0245, 0.0226, 0.0314, 0.0219, 0.0232, 0.0227, 0.0183], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 15:11:25,942 INFO [finetune.py:976] (0/7) Epoch 21, batch 1200, loss[loss=0.1776, simple_loss=0.2532, pruned_loss=0.05095, over 4891.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2443, pruned_loss=0.05001, over 951489.72 frames. ], batch size: 36, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:11:31,121 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.519e+02 1.757e+02 2.035e+02 5.645e+02, threshold=3.514e+02, percent-clipped=1.0 +2023-04-27 15:11:46,026 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8228, 1.7692, 0.9952, 1.4577, 1.7797, 1.6843, 1.5579, 1.6013], + device='cuda:0'), covar=tensor([0.0488, 0.0352, 0.0328, 0.0555, 0.0269, 0.0456, 0.0462, 0.0535], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 15:11:50,223 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=115790.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:11:59,754 INFO [finetune.py:976] (0/7) Epoch 21, batch 1250, loss[loss=0.1314, simple_loss=0.2085, pruned_loss=0.02716, over 4833.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2422, pruned_loss=0.04957, over 952401.20 frames. ], batch size: 30, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:12:55,528 INFO [finetune.py:976] (0/7) Epoch 21, batch 1300, loss[loss=0.1567, simple_loss=0.233, pruned_loss=0.04017, over 4711.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2403, pruned_loss=0.04897, over 952515.19 frames. 
], batch size: 23, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:12:59,786 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.963e+01 1.535e+02 1.763e+02 2.248e+02 3.829e+02, threshold=3.527e+02, percent-clipped=1.0 +2023-04-27 15:13:06,969 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0756, 1.3396, 3.0923, 2.8721, 2.7592, 2.9482, 2.8617, 2.7436], + device='cuda:0'), covar=tensor([0.6214, 0.4556, 0.1320, 0.1743, 0.1205, 0.2104, 0.3077, 0.1441], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0304, 0.0402, 0.0400, 0.0346, 0.0404, 0.0309, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:13:21,237 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=115892.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 15:13:29,455 INFO [finetune.py:976] (0/7) Epoch 21, batch 1350, loss[loss=0.1823, simple_loss=0.2498, pruned_loss=0.05747, over 4779.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2406, pruned_loss=0.0488, over 953383.84 frames. ], batch size: 26, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:14:09,080 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-04-27 15:14:24,073 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 15:14:34,416 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=115953.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 15:14:34,901 INFO [finetune.py:976] (0/7) Epoch 21, batch 1400, loss[loss=0.1836, simple_loss=0.2678, pruned_loss=0.04968, over 4818.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2437, pruned_loss=0.04967, over 953689.55 frames. ], batch size: 39, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:14:44,906 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.645e+02 1.882e+02 2.243e+02 4.994e+02, threshold=3.764e+02, percent-clipped=1.0 +2023-04-27 15:14:57,444 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.82 vs. limit=5.0 +2023-04-27 15:15:07,853 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0 +2023-04-27 15:15:12,402 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9292, 1.8316, 1.9958, 2.3924, 2.3522, 1.8548, 1.6298, 2.0881], + device='cuda:0'), covar=tensor([0.0867, 0.0971, 0.0639, 0.0554, 0.0642, 0.0987, 0.0812, 0.0600], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0198, 0.0180, 0.0169, 0.0175, 0.0178, 0.0150, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:15:22,755 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-116000.pt +2023-04-27 15:15:26,318 INFO [finetune.py:976] (0/7) Epoch 21, batch 1450, loss[loss=0.1626, simple_loss=0.2344, pruned_loss=0.04541, over 4835.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.2464, pruned_loss=0.05093, over 953283.05 frames. ], batch size: 30, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:15:28,152 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.86 vs. 
limit=5.0 +2023-04-27 15:15:33,421 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5051, 1.1828, 0.6930, 1.1913, 1.4010, 1.3758, 1.2887, 1.3315], + device='cuda:0'), covar=tensor([0.0506, 0.0407, 0.0382, 0.0556, 0.0302, 0.0524, 0.0497, 0.0559], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 15:15:33,437 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116014.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:15:42,217 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116026.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:15:42,818 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116027.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:15:44,626 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.3215, 4.2033, 2.9455, 4.9392, 4.2733, 4.1650, 1.8570, 4.2633], + device='cuda:0'), covar=tensor([0.1535, 0.1089, 0.3718, 0.1044, 0.2484, 0.1573, 0.5591, 0.2179], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0215, 0.0253, 0.0308, 0.0297, 0.0247, 0.0274, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 15:16:00,015 INFO [finetune.py:976] (0/7) Epoch 21, batch 1500, loss[loss=0.1528, simple_loss=0.2175, pruned_loss=0.04409, over 4751.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2472, pruned_loss=0.05088, over 954540.41 frames. ], batch size: 23, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:16:05,187 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.610e+02 1.919e+02 2.360e+02 3.995e+02, threshold=3.837e+02, percent-clipped=2.0 +2023-04-27 15:16:13,675 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=116074.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:16:14,287 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=116075.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:16:14,335 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116075.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 15:16:24,678 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7088, 2.0632, 1.9051, 2.0419, 1.8488, 2.0550, 1.9645, 1.9179], + device='cuda:0'), covar=tensor([0.3986, 0.6360, 0.5377, 0.4544, 0.6254, 0.7266, 0.7269, 0.6233], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0378, 0.0323, 0.0337, 0.0348, 0.0395, 0.0358, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 15:16:33,606 INFO [finetune.py:976] (0/7) Epoch 21, batch 1550, loss[loss=0.1555, simple_loss=0.2241, pruned_loss=0.04347, over 4823.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2464, pruned_loss=0.05079, over 953531.74 frames. 
], batch size: 38, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:16:34,311 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116105.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:16:41,983 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116116.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:16:53,380 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-27 15:16:53,821 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2601, 1.6485, 1.9498, 2.6789, 2.7276, 2.0563, 1.6791, 2.2918], + device='cuda:0'), covar=tensor([0.0850, 0.1538, 0.0964, 0.0554, 0.0604, 0.1028, 0.0916, 0.0631], + device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0199, 0.0182, 0.0170, 0.0176, 0.0179, 0.0150, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:17:06,680 INFO [finetune.py:976] (0/7) Epoch 21, batch 1600, loss[loss=0.1497, simple_loss=0.2304, pruned_loss=0.03449, over 4923.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2444, pruned_loss=0.05001, over 955405.74 frames. ], batch size: 38, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:17:08,696 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8017, 1.3972, 1.9758, 2.3116, 1.9392, 1.8184, 1.8776, 1.8260], + device='cuda:0'), covar=tensor([0.4369, 0.6632, 0.6159, 0.5676, 0.5710, 0.7843, 0.7938, 0.9100], + device='cuda:0'), in_proj_covar=tensor([0.0430, 0.0412, 0.0508, 0.0508, 0.0459, 0.0488, 0.0498, 0.0501], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:17:10,955 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.072e+02 1.579e+02 1.851e+02 2.317e+02 5.378e+02, threshold=3.702e+02, percent-clipped=3.0 +2023-04-27 15:17:14,640 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116166.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:17:21,325 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116177.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:17:39,788 INFO [finetune.py:976] (0/7) Epoch 21, batch 1650, loss[loss=0.1976, simple_loss=0.2551, pruned_loss=0.07002, over 4718.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2417, pruned_loss=0.0491, over 955444.29 frames. ], batch size: 23, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:17:52,578 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-27 15:18:14,846 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116248.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 15:18:18,421 INFO [finetune.py:976] (0/7) Epoch 21, batch 1700, loss[loss=0.1895, simple_loss=0.2596, pruned_loss=0.05973, over 4917.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2392, pruned_loss=0.04857, over 956694.55 frames. 
], batch size: 38, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:18:28,123 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.499e+02 1.782e+02 2.142e+02 3.522e+02, threshold=3.563e+02, percent-clipped=0.0 +2023-04-27 15:18:48,792 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6056, 2.0784, 1.6934, 1.4806, 1.2090, 1.2070, 1.7402, 1.2051], + device='cuda:0'), covar=tensor([0.1702, 0.1315, 0.1437, 0.1734, 0.2379, 0.1915, 0.0978, 0.2081], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0211, 0.0169, 0.0204, 0.0200, 0.0184, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 15:19:31,552 INFO [finetune.py:976] (0/7) Epoch 21, batch 1750, loss[loss=0.152, simple_loss=0.2304, pruned_loss=0.03678, over 4861.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2406, pruned_loss=0.04906, over 955805.90 frames. ], batch size: 31, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:19:55,824 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116323.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:20:33,794 INFO [finetune.py:976] (0/7) Epoch 21, batch 1800, loss[loss=0.1759, simple_loss=0.2539, pruned_loss=0.04896, over 4822.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2444, pruned_loss=0.0507, over 956084.56 frames. ], batch size: 40, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:20:38,073 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.681e+02 1.987e+02 2.405e+02 5.932e+02, threshold=3.974e+02, percent-clipped=5.0 +2023-04-27 15:20:43,610 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116370.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 15:20:53,264 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116384.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:20:59,554 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.4044, 4.3246, 3.0114, 5.0122, 4.2838, 4.3138, 1.8235, 4.3450], + device='cuda:0'), covar=tensor([0.1498, 0.1041, 0.3455, 0.0837, 0.3008, 0.1491, 0.5241, 0.2072], + device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0218, 0.0255, 0.0312, 0.0302, 0.0249, 0.0279, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 15:21:00,363 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.14 vs. limit=5.0 +2023-04-27 15:21:07,564 INFO [finetune.py:976] (0/7) Epoch 21, batch 1850, loss[loss=0.1292, simple_loss=0.1956, pruned_loss=0.03139, over 4296.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.245, pruned_loss=0.05082, over 955532.19 frames. ], batch size: 18, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:21:35,311 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116446.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:21:40,096 INFO [finetune.py:976] (0/7) Epoch 21, batch 1900, loss[loss=0.187, simple_loss=0.2584, pruned_loss=0.05775, over 4884.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2459, pruned_loss=0.0507, over 956727.61 frames. 
], batch size: 32, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:21:45,215 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.938e+01 1.604e+02 1.932e+02 2.429e+02 3.655e+02, threshold=3.864e+02, percent-clipped=0.0 +2023-04-27 15:21:45,303 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116461.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:21:52,092 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116472.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:21:52,116 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6348, 3.4609, 0.7578, 1.7327, 1.9357, 2.4157, 1.8391, 0.9913], + device='cuda:0'), covar=tensor([0.1277, 0.0836, 0.2193, 0.1308, 0.1125, 0.0947, 0.1524, 0.1938], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0238, 0.0136, 0.0118, 0.0132, 0.0151, 0.0115, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 15:21:58,193 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-27 15:21:59,227 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5033, 3.2630, 0.9942, 1.7206, 1.8222, 2.3641, 1.7678, 0.9679], + device='cuda:0'), covar=tensor([0.1387, 0.0917, 0.1866, 0.1266, 0.1121, 0.0956, 0.1599, 0.1997], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0238, 0.0136, 0.0119, 0.0132, 0.0151, 0.0115, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 15:22:13,561 INFO [finetune.py:976] (0/7) Epoch 21, batch 1950, loss[loss=0.145, simple_loss=0.2262, pruned_loss=0.03188, over 4883.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2453, pruned_loss=0.05, over 959015.42 frames. ], batch size: 35, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:22:14,266 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116505.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:22:16,020 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116507.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 15:22:16,706 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-27 15:22:42,821 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116548.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 15:22:46,842 INFO [finetune.py:976] (0/7) Epoch 21, batch 2000, loss[loss=0.1568, simple_loss=0.2209, pruned_loss=0.04641, over 4853.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2431, pruned_loss=0.04931, over 957468.85 frames. 
], batch size: 44, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:22:51,556 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.496e+02 1.815e+02 2.157e+02 3.594e+02, threshold=3.630e+02, percent-clipped=0.0 +2023-04-27 15:22:55,220 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116566.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:23:01,905 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9049, 1.6195, 1.8203, 2.2159, 2.2337, 1.7410, 1.4535, 1.8958], + device='cuda:0'), covar=tensor([0.0804, 0.1023, 0.0635, 0.0485, 0.0526, 0.0763, 0.0783, 0.0574], + device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0200, 0.0184, 0.0172, 0.0177, 0.0180, 0.0152, 0.0178], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:23:14,413 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=116596.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 15:23:20,571 INFO [finetune.py:976] (0/7) Epoch 21, batch 2050, loss[loss=0.1447, simple_loss=0.2119, pruned_loss=0.03874, over 4755.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2394, pruned_loss=0.04804, over 958326.24 frames. ], batch size: 27, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:23:30,109 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116618.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:23:37,523 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-27 15:23:39,316 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0799, 0.7638, 0.9182, 0.7946, 1.2324, 0.9707, 0.8633, 0.9173], + device='cuda:0'), covar=tensor([0.1741, 0.1696, 0.2150, 0.1683, 0.1092, 0.1530, 0.1936, 0.2689], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0312, 0.0352, 0.0290, 0.0329, 0.0311, 0.0304, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.3858e-05, 6.4554e-05, 7.4362e-05, 5.8628e-05, 6.7941e-05, 6.5186e-05, + 6.3738e-05, 7.9545e-05], device='cuda:0') +2023-04-27 15:23:59,070 INFO [finetune.py:976] (0/7) Epoch 21, batch 2100, loss[loss=0.1494, simple_loss=0.2205, pruned_loss=0.03917, over 4760.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.24, pruned_loss=0.0492, over 956156.99 frames. 
], batch size: 28, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:23:59,790 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5877, 1.5246, 0.5387, 1.3025, 1.5489, 1.4643, 1.3645, 1.3764], + device='cuda:0'), covar=tensor([0.0483, 0.0372, 0.0382, 0.0533, 0.0274, 0.0494, 0.0477, 0.0561], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 15:24:03,926 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.802e+01 1.616e+02 1.834e+02 2.363e+02 4.673e+02, threshold=3.668e+02, percent-clipped=1.0 +2023-04-27 15:24:21,168 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116670.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 15:24:32,897 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116679.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:24:32,939 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116679.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:25:00,267 INFO [finetune.py:976] (0/7) Epoch 21, batch 2150, loss[loss=0.199, simple_loss=0.2659, pruned_loss=0.066, over 4904.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2445, pruned_loss=0.05065, over 956644.87 frames. ], batch size: 36, lr: 3.20e-03, grad_scale: 32.0 +2023-04-27 15:25:00,410 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116704.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:25:21,294 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=116718.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:26:05,620 INFO [finetune.py:976] (0/7) Epoch 21, batch 2200, loss[loss=0.1772, simple_loss=0.2448, pruned_loss=0.05477, over 4076.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.245, pruned_loss=0.05068, over 955605.93 frames. 
], batch size: 65, lr: 3.19e-03, grad_scale: 32.0 +2023-04-27 15:26:10,852 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.212e+01 1.600e+02 1.964e+02 2.414e+02 3.602e+02, threshold=3.928e+02, percent-clipped=0.0 +2023-04-27 15:26:10,941 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116761.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:26:13,890 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116765.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:26:15,173 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0226, 2.5167, 2.0084, 1.8762, 1.3627, 1.4803, 2.1175, 1.3548], + device='cuda:0'), covar=tensor([0.1554, 0.1330, 0.1396, 0.1655, 0.2263, 0.1874, 0.0942, 0.2065], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0210, 0.0169, 0.0202, 0.0199, 0.0184, 0.0155, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 15:26:18,740 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116772.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:26:38,505 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116802.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 15:26:38,559 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=116802.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:26:39,689 INFO [finetune.py:976] (0/7) Epoch 21, batch 2250, loss[loss=0.2126, simple_loss=0.2775, pruned_loss=0.07385, over 4893.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2469, pruned_loss=0.05149, over 956044.55 frames. ], batch size: 43, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:26:42,838 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=116809.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:26:50,606 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=116820.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:26:51,296 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4678, 2.0078, 2.3687, 2.9917, 2.3387, 1.9023, 1.8174, 2.1865], + device='cuda:0'), covar=tensor([0.3039, 0.3062, 0.1477, 0.2130, 0.2408, 0.2567, 0.3727, 0.2132], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0244, 0.0225, 0.0312, 0.0217, 0.0230, 0.0226, 0.0183], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 15:27:13,183 INFO [finetune.py:976] (0/7) Epoch 21, batch 2300, loss[loss=0.1703, simple_loss=0.2437, pruned_loss=0.04846, over 4911.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.247, pruned_loss=0.05115, over 956482.08 frames. 
], batch size: 37, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:27:14,474 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3080, 2.9793, 0.9912, 1.6614, 1.7106, 2.1683, 1.7200, 0.9648], + device='cuda:0'), covar=tensor([0.1381, 0.1189, 0.1807, 0.1273, 0.1133, 0.0960, 0.1535, 0.1937], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0237, 0.0134, 0.0118, 0.0131, 0.0150, 0.0115, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 15:27:17,447 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.557e+02 1.911e+02 2.274e+02 3.749e+02, threshold=3.822e+02, percent-clipped=0.0 +2023-04-27 15:27:17,534 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116861.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:27:18,830 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=116863.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:27:37,066 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5334, 1.9719, 2.3768, 2.9040, 2.3294, 1.9332, 1.7748, 2.1954], + device='cuda:0'), covar=tensor([0.3108, 0.3200, 0.1589, 0.2276, 0.2711, 0.2497, 0.3895, 0.1979], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0245, 0.0226, 0.0313, 0.0219, 0.0231, 0.0228, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 15:27:46,960 INFO [finetune.py:976] (0/7) Epoch 21, batch 2350, loss[loss=0.1658, simple_loss=0.2356, pruned_loss=0.04797, over 4766.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2442, pruned_loss=0.05043, over 956396.04 frames. ], batch size: 26, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:27:47,938 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.45 vs. limit=5.0 +2023-04-27 15:28:20,892 INFO [finetune.py:976] (0/7) Epoch 21, batch 2400, loss[loss=0.1731, simple_loss=0.242, pruned_loss=0.05213, over 4821.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2423, pruned_loss=0.04963, over 957130.43 frames. ], batch size: 39, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:28:25,119 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.594e+02 1.806e+02 2.173e+02 4.519e+02, threshold=3.612e+02, percent-clipped=1.0 +2023-04-27 15:28:34,735 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=116974.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:28:37,799 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=116979.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:28:54,617 INFO [finetune.py:976] (0/7) Epoch 21, batch 2450, loss[loss=0.1734, simple_loss=0.2521, pruned_loss=0.04731, over 4821.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.239, pruned_loss=0.04836, over 955831.54 frames. 
], batch size: 41, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:29:10,686 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=117027.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:29:17,903 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4430, 1.6356, 1.5559, 1.9259, 1.8748, 2.1494, 1.4628, 3.8667], + device='cuda:0'), covar=tensor([0.0590, 0.0822, 0.0845, 0.1216, 0.0633, 0.0449, 0.0759, 0.0155], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 15:29:28,081 INFO [finetune.py:976] (0/7) Epoch 21, batch 2500, loss[loss=0.1906, simple_loss=0.2773, pruned_loss=0.05193, over 4821.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2405, pruned_loss=0.04887, over 955768.59 frames. ], batch size: 39, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:29:28,816 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0387, 1.3931, 1.3108, 1.6407, 1.5327, 1.7186, 1.2793, 2.9823], + device='cuda:0'), covar=tensor([0.0677, 0.0873, 0.0793, 0.1231, 0.0686, 0.0532, 0.0826, 0.0200], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 15:29:29,948 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7345, 1.1791, 1.8319, 2.1593, 1.7887, 1.7186, 1.7822, 1.7429], + device='cuda:0'), covar=tensor([0.4430, 0.6582, 0.6209, 0.5752, 0.5952, 0.7340, 0.7549, 0.8409], + device='cuda:0'), in_proj_covar=tensor([0.0428, 0.0411, 0.0505, 0.0504, 0.0456, 0.0485, 0.0493, 0.0498], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:29:32,761 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117060.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:29:33,286 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.592e+02 1.814e+02 2.143e+02 3.626e+02, threshold=3.628e+02, percent-clipped=1.0 +2023-04-27 15:30:29,038 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117102.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:30:30,119 INFO [finetune.py:976] (0/7) Epoch 21, batch 2550, loss[loss=0.1254, simple_loss=0.1869, pruned_loss=0.03198, over 4745.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2446, pruned_loss=0.05003, over 955333.79 frames. ], batch size: 23, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:30:59,077 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5255, 1.6830, 0.7569, 1.2444, 1.5624, 1.3836, 1.2811, 1.4133], + device='cuda:0'), covar=tensor([0.0510, 0.0354, 0.0365, 0.0569, 0.0304, 0.0518, 0.0507, 0.0556], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 15:31:33,697 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=117150.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:31:41,465 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. 
limit=2.0 +2023-04-27 15:31:41,893 INFO [finetune.py:976] (0/7) Epoch 21, batch 2600, loss[loss=0.2091, simple_loss=0.275, pruned_loss=0.0716, over 4901.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2462, pruned_loss=0.05059, over 956523.46 frames. ], batch size: 35, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:31:44,414 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117158.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:31:52,559 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.651e+02 1.953e+02 2.341e+02 4.282e+02, threshold=3.906e+02, percent-clipped=1.0 +2023-04-27 15:31:52,657 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117161.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:32:19,265 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-27 15:32:27,548 INFO [finetune.py:976] (0/7) Epoch 21, batch 2650, loss[loss=0.1347, simple_loss=0.1997, pruned_loss=0.03485, over 4771.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2463, pruned_loss=0.05065, over 955542.87 frames. ], batch size: 26, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:32:30,675 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=117209.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:32:36,177 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8901, 1.5093, 1.4865, 1.6422, 2.1296, 1.7268, 1.4038, 1.3728], + device='cuda:0'), covar=tensor([0.1569, 0.1554, 0.2020, 0.1520, 0.0827, 0.1395, 0.2122, 0.2141], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0313, 0.0352, 0.0291, 0.0327, 0.0310, 0.0304, 0.0372], + device='cuda:0'), out_proj_covar=tensor([6.4108e-05, 6.4832e-05, 7.4240e-05, 5.8708e-05, 6.7545e-05, 6.5070e-05, + 6.3780e-05, 7.9093e-05], device='cuda:0') +2023-04-27 15:32:39,142 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117221.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:33:00,738 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9068, 3.7907, 2.6879, 4.4374, 3.8825, 3.8564, 1.5743, 3.8411], + device='cuda:0'), covar=tensor([0.1459, 0.1160, 0.2877, 0.1531, 0.2573, 0.1756, 0.5445, 0.2101], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0215, 0.0252, 0.0306, 0.0297, 0.0247, 0.0275, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 15:33:01,272 INFO [finetune.py:976] (0/7) Epoch 21, batch 2700, loss[loss=0.1686, simple_loss=0.2341, pruned_loss=0.05156, over 4822.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2455, pruned_loss=0.04983, over 952925.33 frames. 
], batch size: 30, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:33:06,002 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.896e+01 1.368e+02 1.680e+02 2.087e+02 5.894e+02, threshold=3.359e+02, percent-clipped=1.0 +2023-04-27 15:33:15,074 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117274.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:33:19,962 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117282.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:33:19,999 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4741, 1.8011, 1.8012, 1.9485, 1.8310, 1.9442, 1.9011, 1.8816], + device='cuda:0'), covar=tensor([0.3568, 0.5105, 0.4358, 0.4287, 0.5517, 0.6782, 0.5115, 0.4704], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0372, 0.0322, 0.0335, 0.0345, 0.0393, 0.0355, 0.0327], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 15:33:35,174 INFO [finetune.py:976] (0/7) Epoch 21, batch 2750, loss[loss=0.1912, simple_loss=0.2456, pruned_loss=0.06844, over 4822.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2429, pruned_loss=0.04932, over 954660.04 frames. ], batch size: 33, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:33:47,709 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=117322.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:33:52,063 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=117329.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:34:08,696 INFO [finetune.py:976] (0/7) Epoch 21, batch 2800, loss[loss=0.1744, simple_loss=0.2359, pruned_loss=0.0564, over 4795.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2398, pruned_loss=0.0485, over 954519.48 frames. ], batch size: 29, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:34:09,391 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3297, 1.5718, 1.3385, 1.5209, 1.3242, 1.2871, 1.3934, 1.0986], + device='cuda:0'), covar=tensor([0.1681, 0.1167, 0.0967, 0.1288, 0.3570, 0.1268, 0.1644, 0.2070], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0297, 0.0215, 0.0276, 0.0308, 0.0252, 0.0245, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1393e-04, 1.1790e-04, 8.5076e-05, 1.0909e-04, 1.2475e-04, 9.9742e-05, + 9.8752e-05, 1.0313e-04], device='cuda:0') +2023-04-27 15:34:12,915 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117360.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:34:13,414 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.017e+02 1.550e+02 1.821e+02 2.376e+02 5.325e+02, threshold=3.642e+02, percent-clipped=3.0 +2023-04-27 15:34:31,546 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=117390.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:34:41,632 INFO [finetune.py:976] (0/7) Epoch 21, batch 2850, loss[loss=0.1562, simple_loss=0.2386, pruned_loss=0.03686, over 4908.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2383, pruned_loss=0.04822, over 955027.89 frames. ], batch size: 32, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:34:44,556 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=117408.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:35:14,767 INFO [finetune.py:976] (0/7) Epoch 21, batch 2900, loss[loss=0.1466, simple_loss=0.2211, pruned_loss=0.03603, over 4722.00 frames. 
], tot_loss[loss=0.1694, simple_loss=0.2404, pruned_loss=0.04919, over 955434.88 frames. ], batch size: 23, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:35:17,801 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117458.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:35:19,564 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.585e+02 1.858e+02 2.394e+02 5.975e+02, threshold=3.717e+02, percent-clipped=5.0 +2023-04-27 15:35:20,447 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 15:36:16,933 INFO [finetune.py:976] (0/7) Epoch 21, batch 2950, loss[loss=0.1744, simple_loss=0.2421, pruned_loss=0.05335, over 4823.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2426, pruned_loss=0.04969, over 955771.10 frames. ], batch size: 33, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:36:17,644 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1321, 1.4218, 1.3609, 1.6500, 1.5457, 1.6703, 1.3028, 3.0043], + device='cuda:0'), covar=tensor([0.0609, 0.0805, 0.0777, 0.1200, 0.0635, 0.0528, 0.0753, 0.0181], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0056], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 15:36:18,222 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=117506.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:37:01,618 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6081, 1.3072, 1.3151, 1.3930, 1.8120, 1.4442, 1.1881, 1.2362], + device='cuda:0'), covar=tensor([0.1622, 0.1406, 0.1906, 0.1442, 0.0850, 0.1567, 0.1886, 0.2373], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0314, 0.0353, 0.0292, 0.0329, 0.0311, 0.0305, 0.0373], + device='cuda:0'), out_proj_covar=tensor([6.4249e-05, 6.5013e-05, 7.4457e-05, 5.8947e-05, 6.7881e-05, 6.5185e-05, + 6.3972e-05, 7.9254e-05], device='cuda:0') +2023-04-27 15:37:23,420 INFO [finetune.py:976] (0/7) Epoch 21, batch 3000, loss[loss=0.1946, simple_loss=0.2752, pruned_loss=0.05698, over 4736.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2442, pruned_loss=0.05006, over 953404.60 frames. ], batch size: 54, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:37:23,421 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 15:37:30,663 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4494, 1.2851, 1.6703, 1.6630, 1.3588, 1.2781, 1.3719, 0.7885], + device='cuda:0'), covar=tensor([0.0557, 0.0615, 0.0414, 0.0507, 0.0771, 0.1208, 0.0546, 0.0588], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0067, 0.0067, 0.0075, 0.0095, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 15:37:43,670 INFO [finetune.py:1010] (0/7) Epoch 21, validation: loss=0.1531, simple_loss=0.2228, pruned_loss=0.04164, over 2265189.00 frames. 
+2023-04-27 15:37:43,670 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 15:37:46,191 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6643, 2.3736, 2.6349, 3.0703, 2.9432, 2.3586, 2.2155, 2.5037], + device='cuda:0'), covar=tensor([0.0922, 0.0942, 0.0664, 0.0592, 0.0625, 0.0996, 0.0734, 0.0626], + device='cuda:0'), in_proj_covar=tensor([0.0189, 0.0203, 0.0185, 0.0175, 0.0178, 0.0182, 0.0153, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:37:54,338 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.590e+02 1.926e+02 2.493e+02 6.945e+02, threshold=3.852e+02, percent-clipped=2.0 +2023-04-27 15:38:15,509 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117577.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:38:37,405 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2276, 1.9577, 2.1779, 2.5780, 2.5367, 1.9716, 1.8446, 2.1326], + device='cuda:0'), covar=tensor([0.0844, 0.0986, 0.0664, 0.0543, 0.0578, 0.0886, 0.0715, 0.0631], + device='cuda:0'), in_proj_covar=tensor([0.0188, 0.0202, 0.0185, 0.0174, 0.0178, 0.0181, 0.0153, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:38:49,964 INFO [finetune.py:976] (0/7) Epoch 21, batch 3050, loss[loss=0.1817, simple_loss=0.2496, pruned_loss=0.0569, over 4908.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2447, pruned_loss=0.0499, over 954094.05 frames. ], batch size: 37, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:39:24,322 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9556, 2.4851, 1.0019, 1.2653, 1.7910, 1.2106, 2.9526, 1.4672], + device='cuda:0'), covar=tensor([0.0740, 0.0542, 0.0724, 0.1179, 0.0473, 0.0984, 0.0250, 0.0680], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0064, 0.0047, 0.0046, 0.0049, 0.0052, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 15:39:28,406 INFO [finetune.py:976] (0/7) Epoch 21, batch 3100, loss[loss=0.1246, simple_loss=0.1956, pruned_loss=0.02685, over 4827.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2436, pruned_loss=0.04951, over 955318.51 frames. ], batch size: 30, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:39:33,617 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.782e+01 1.629e+02 1.831e+02 2.140e+02 4.594e+02, threshold=3.661e+02, percent-clipped=1.0 +2023-04-27 15:39:48,291 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7769, 1.5748, 1.7245, 2.1377, 2.1195, 1.7416, 1.4686, 1.9253], + device='cuda:0'), covar=tensor([0.0767, 0.1141, 0.0782, 0.0506, 0.0513, 0.0739, 0.0701, 0.0506], + device='cuda:0'), in_proj_covar=tensor([0.0188, 0.0202, 0.0185, 0.0174, 0.0178, 0.0181, 0.0152, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:39:49,466 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=117685.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:40:01,891 INFO [finetune.py:976] (0/7) Epoch 21, batch 3150, loss[loss=0.2024, simple_loss=0.2658, pruned_loss=0.06945, over 4734.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2418, pruned_loss=0.04925, over 954205.76 frames. 
], batch size: 59, lr: 3.19e-03, grad_scale: 64.0 +2023-04-27 15:40:19,861 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7314, 1.1684, 1.8312, 2.2224, 1.8261, 1.6625, 1.7626, 1.7190], + device='cuda:0'), covar=tensor([0.4002, 0.6029, 0.4975, 0.4932, 0.5154, 0.6673, 0.6634, 0.7899], + device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0411, 0.0504, 0.0504, 0.0456, 0.0486, 0.0492, 0.0499], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:40:34,883 INFO [finetune.py:976] (0/7) Epoch 21, batch 3200, loss[loss=0.2267, simple_loss=0.2926, pruned_loss=0.08039, over 4799.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2391, pruned_loss=0.04828, over 953274.72 frames. ], batch size: 45, lr: 3.19e-03, grad_scale: 32.0 +2023-04-27 15:40:40,630 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.488e+02 1.740e+02 2.134e+02 4.816e+02, threshold=3.479e+02, percent-clipped=1.0 +2023-04-27 15:41:06,183 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-27 15:41:31,202 INFO [finetune.py:976] (0/7) Epoch 21, batch 3250, loss[loss=0.1551, simple_loss=0.2306, pruned_loss=0.0398, over 4928.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2393, pruned_loss=0.04863, over 952023.08 frames. ], batch size: 42, lr: 3.19e-03, grad_scale: 32.0 +2023-04-27 15:42:30,385 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9728, 2.5982, 0.9776, 1.3521, 1.8765, 1.2080, 3.3122, 1.6139], + device='cuda:0'), covar=tensor([0.0717, 0.0608, 0.0810, 0.1207, 0.0516, 0.1015, 0.0186, 0.0663], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 15:42:33,314 INFO [finetune.py:976] (0/7) Epoch 21, batch 3300, loss[loss=0.1752, simple_loss=0.2458, pruned_loss=0.05233, over 4795.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2432, pruned_loss=0.05026, over 953796.23 frames. ], batch size: 29, lr: 3.19e-03, grad_scale: 32.0 +2023-04-27 15:42:45,022 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.642e+02 1.977e+02 2.287e+02 4.163e+02, threshold=3.954e+02, percent-clipped=4.0 +2023-04-27 15:43:01,377 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117877.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:43:18,017 INFO [finetune.py:976] (0/7) Epoch 21, batch 3350, loss[loss=0.146, simple_loss=0.2217, pruned_loss=0.03515, over 4734.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2459, pruned_loss=0.05094, over 953386.54 frames. 
], batch size: 23, lr: 3.19e-03, grad_scale: 32.0 +2023-04-27 15:43:37,865 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=117925.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:43:53,884 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8967, 1.5804, 2.1066, 2.2736, 1.9379, 1.8833, 2.0074, 1.9990], + device='cuda:0'), covar=tensor([0.5310, 0.7343, 0.7595, 0.6271, 0.6667, 0.9235, 0.9555, 1.0522], + device='cuda:0'), in_proj_covar=tensor([0.0429, 0.0412, 0.0505, 0.0506, 0.0458, 0.0487, 0.0494, 0.0502], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:44:02,872 INFO [finetune.py:976] (0/7) Epoch 21, batch 3400, loss[loss=0.133, simple_loss=0.2229, pruned_loss=0.02153, over 4766.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2477, pruned_loss=0.05186, over 955516.98 frames. ], batch size: 28, lr: 3.19e-03, grad_scale: 32.0 +2023-04-27 15:44:13,574 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.581e+02 1.978e+02 2.353e+02 4.308e+02, threshold=3.955e+02, percent-clipped=3.0 +2023-04-27 15:44:43,513 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-27 15:44:44,664 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=117985.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:44:54,345 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-118000.pt +2023-04-27 15:44:57,990 INFO [finetune.py:976] (0/7) Epoch 21, batch 3450, loss[loss=0.1862, simple_loss=0.2513, pruned_loss=0.06056, over 4801.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2481, pruned_loss=0.05179, over 955119.07 frames. ], batch size: 40, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:45:17,643 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=118033.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:45:22,440 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118040.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:45:31,327 INFO [finetune.py:976] (0/7) Epoch 21, batch 3500, loss[loss=0.1543, simple_loss=0.2299, pruned_loss=0.03935, over 4764.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2451, pruned_loss=0.05071, over 954521.64 frames. ], batch size: 26, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:45:36,183 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.106e+01 1.494e+02 1.798e+02 2.113e+02 5.768e+02, threshold=3.596e+02, percent-clipped=1.0 +2023-04-27 15:46:03,428 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118101.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:46:05,140 INFO [finetune.py:976] (0/7) Epoch 21, batch 3550, loss[loss=0.16, simple_loss=0.2311, pruned_loss=0.04447, over 4925.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.242, pruned_loss=0.04955, over 954157.10 frames. 
], batch size: 42, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:46:20,242 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8632, 2.4836, 1.9459, 1.8244, 1.3379, 1.3943, 2.1014, 1.3603], + device='cuda:0'), covar=tensor([0.1598, 0.1391, 0.1349, 0.1745, 0.2294, 0.1873, 0.0926, 0.1966], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0212, 0.0170, 0.0205, 0.0199, 0.0185, 0.0157, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 15:46:39,043 INFO [finetune.py:976] (0/7) Epoch 21, batch 3600, loss[loss=0.1818, simple_loss=0.2387, pruned_loss=0.06241, over 4906.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2399, pruned_loss=0.04933, over 953204.74 frames. ], batch size: 35, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:46:42,900 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-27 15:46:43,924 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.483e+02 1.865e+02 2.338e+02 3.870e+02, threshold=3.730e+02, percent-clipped=2.0 +2023-04-27 15:46:47,651 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118168.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:47:39,451 INFO [finetune.py:976] (0/7) Epoch 21, batch 3650, loss[loss=0.1558, simple_loss=0.2403, pruned_loss=0.03563, over 4757.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2411, pruned_loss=0.04946, over 955648.93 frames. ], batch size: 28, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:47:56,870 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-27 15:48:09,452 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118229.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 15:48:10,528 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4050, 1.0053, 0.4140, 1.1075, 1.0456, 1.2779, 1.1947, 1.1584], + device='cuda:0'), covar=tensor([0.0524, 0.0419, 0.0411, 0.0580, 0.0314, 0.0516, 0.0496, 0.0594], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 15:48:43,767 INFO [finetune.py:976] (0/7) Epoch 21, batch 3700, loss[loss=0.1645, simple_loss=0.2344, pruned_loss=0.04734, over 4837.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.244, pruned_loss=0.0503, over 956934.51 frames. ], batch size: 30, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:48:54,005 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.556e+02 1.839e+02 2.205e+02 3.757e+02, threshold=3.678e+02, percent-clipped=1.0 +2023-04-27 15:48:58,374 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118269.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:49:16,507 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118289.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:49:27,415 INFO [finetune.py:976] (0/7) Epoch 21, batch 3750, loss[loss=0.1998, simple_loss=0.2798, pruned_loss=0.05988, over 4890.00 frames. ], tot_loss[loss=0.1758, simple_loss=0.2472, pruned_loss=0.0522, over 954828.54 frames. 
], batch size: 43, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:49:43,202 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118329.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:49:43,828 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118330.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:50:04,019 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118350.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:50:12,621 INFO [finetune.py:976] (0/7) Epoch 21, batch 3800, loss[loss=0.1531, simple_loss=0.2344, pruned_loss=0.03589, over 4799.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2475, pruned_loss=0.0517, over 955217.27 frames. ], batch size: 25, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:50:23,127 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.645e+02 2.008e+02 2.325e+02 4.479e+02, threshold=4.015e+02, percent-clipped=4.0 +2023-04-27 15:50:52,600 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118390.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:50:56,203 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118396.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:51:02,177 INFO [finetune.py:976] (0/7) Epoch 21, batch 3850, loss[loss=0.1659, simple_loss=0.2407, pruned_loss=0.04557, over 4823.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2461, pruned_loss=0.05065, over 956027.42 frames. ], batch size: 33, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:51:31,814 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5200, 1.1622, 1.3096, 1.2337, 1.6392, 1.3448, 1.1348, 1.2514], + device='cuda:0'), covar=tensor([0.1387, 0.1290, 0.1701, 0.1391, 0.0793, 0.1327, 0.1750, 0.2099], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0309, 0.0348, 0.0286, 0.0323, 0.0306, 0.0299, 0.0367], + device='cuda:0'), out_proj_covar=tensor([6.3130e-05, 6.3887e-05, 7.3476e-05, 5.7755e-05, 6.6758e-05, 6.4076e-05, + 6.2697e-05, 7.7948e-05], device='cuda:0') +2023-04-27 15:51:35,149 INFO [finetune.py:976] (0/7) Epoch 21, batch 3900, loss[loss=0.1705, simple_loss=0.2408, pruned_loss=0.05012, over 4815.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2443, pruned_loss=0.05081, over 955959.82 frames. ], batch size: 39, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:51:40,430 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.017e+02 1.638e+02 1.891e+02 2.265e+02 7.777e+02, threshold=3.782e+02, percent-clipped=1.0 +2023-04-27 15:51:47,812 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5602, 1.3538, 1.7423, 1.8488, 1.3821, 1.0861, 1.3376, 0.8029], + device='cuda:0'), covar=tensor([0.0505, 0.0643, 0.0397, 0.0506, 0.0687, 0.1300, 0.0644, 0.0663], + device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0066, 0.0067, 0.0074, 0.0095, 0.0072, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 15:52:07,427 INFO [finetune.py:976] (0/7) Epoch 21, batch 3950, loss[loss=0.1572, simple_loss=0.223, pruned_loss=0.04576, over 4876.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2417, pruned_loss=0.04988, over 957171.72 frames. 
], batch size: 34, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:52:21,104 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118524.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 15:52:24,381 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0 +2023-04-27 15:52:40,872 INFO [finetune.py:976] (0/7) Epoch 21, batch 4000, loss[loss=0.1195, simple_loss=0.1846, pruned_loss=0.02718, over 4829.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.241, pruned_loss=0.05011, over 956782.54 frames. ], batch size: 25, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:52:47,269 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.678e+01 1.545e+02 1.901e+02 2.367e+02 3.489e+02, threshold=3.803e+02, percent-clipped=0.0 +2023-04-27 15:53:10,553 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.51 vs. limit=5.0 +2023-04-27 15:53:30,868 INFO [finetune.py:976] (0/7) Epoch 21, batch 4050, loss[loss=0.1597, simple_loss=0.2238, pruned_loss=0.04782, over 4657.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2433, pruned_loss=0.05083, over 955763.34 frames. ], batch size: 23, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:53:31,593 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7450, 2.1646, 0.9835, 1.4909, 2.0082, 1.6255, 1.5427, 1.7005], + device='cuda:0'), covar=tensor([0.0500, 0.0343, 0.0340, 0.0566, 0.0270, 0.0542, 0.0524, 0.0582], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 15:53:55,022 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2839, 1.5770, 1.6464, 1.8272, 1.6825, 1.7821, 1.6997, 1.7279], + device='cuda:0'), covar=tensor([0.4041, 0.5258, 0.4218, 0.4220, 0.5278, 0.6882, 0.5498, 0.4651], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0374, 0.0324, 0.0337, 0.0348, 0.0395, 0.0357, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 15:53:56,770 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118625.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:54:06,596 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=118631.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:54:26,638 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118645.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:54:33,133 INFO [finetune.py:976] (0/7) Epoch 21, batch 4100, loss[loss=0.1541, simple_loss=0.2307, pruned_loss=0.03878, over 4840.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2464, pruned_loss=0.05181, over 955872.77 frames. 
], batch size: 30, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:54:38,509 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.657e+02 1.981e+02 2.296e+02 3.754e+02, threshold=3.963e+02, percent-clipped=0.0 +2023-04-27 15:54:54,352 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118685.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:54:58,635 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=118692.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:55:00,963 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118696.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:55:06,700 INFO [finetune.py:976] (0/7) Epoch 21, batch 4150, loss[loss=0.1684, simple_loss=0.2532, pruned_loss=0.04182, over 4776.00 frames. ], tot_loss[loss=0.1759, simple_loss=0.2476, pruned_loss=0.05205, over 954389.87 frames. ], batch size: 29, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:55:48,372 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2310, 1.5081, 1.4867, 1.8191, 1.7501, 1.8796, 1.4020, 3.6298], + device='cuda:0'), covar=tensor([0.0624, 0.0810, 0.0791, 0.1233, 0.0631, 0.0505, 0.0792, 0.0140], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 15:55:56,004 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=118744.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 15:56:07,490 INFO [finetune.py:976] (0/7) Epoch 21, batch 4200, loss[loss=0.1575, simple_loss=0.2273, pruned_loss=0.04387, over 4898.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2474, pruned_loss=0.05138, over 955143.94 frames. ], batch size: 35, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:56:19,458 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.047e+02 1.568e+02 1.859e+02 2.228e+02 3.642e+02, threshold=3.719e+02, percent-clipped=0.0 +2023-04-27 15:56:20,817 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6008, 3.0229, 2.4056, 2.4604, 2.7262, 2.3527, 3.8705, 1.9480], + device='cuda:0'), covar=tensor([0.3409, 0.1719, 0.3555, 0.3321, 0.1811, 0.2633, 0.1035, 0.4156], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0347, 0.0422, 0.0351, 0.0378, 0.0373, 0.0367, 0.0415], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:56:55,011 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2096, 1.9771, 2.1106, 2.4593, 2.4303, 1.9621, 1.6370, 2.2206], + device='cuda:0'), covar=tensor([0.0777, 0.1052, 0.0658, 0.0590, 0.0582, 0.0795, 0.0820, 0.0585], + device='cuda:0'), in_proj_covar=tensor([0.0188, 0.0203, 0.0185, 0.0174, 0.0179, 0.0181, 0.0153, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 15:56:58,125 INFO [finetune.py:976] (0/7) Epoch 21, batch 4250, loss[loss=0.1837, simple_loss=0.2478, pruned_loss=0.05984, over 4900.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2449, pruned_loss=0.05083, over 954393.15 frames. ], batch size: 32, lr: 3.18e-03, grad_scale: 32.0 +2023-04-27 15:57:04,079 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0
+2023-04-27 15:57:13,417 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118824.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 15:57:32,157 INFO [finetune.py:976] (0/7) Epoch 21, batch 4300, loss[loss=0.2081, simple_loss=0.2784, pruned_loss=0.06885, over 4795.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2427, pruned_loss=0.05048, over 951836.20 frames. ], batch size: 29, lr: 3.18e-03, grad_scale: 32.0
+2023-04-27 15:57:37,516 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.744e+01 1.518e+02 1.726e+02 2.140e+02 3.725e+02, threshold=3.451e+02, percent-clipped=1.0
+2023-04-27 15:57:44,619 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=118872.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 15:58:06,089 INFO [finetune.py:976] (0/7) Epoch 21, batch 4350, loss[loss=0.1531, simple_loss=0.2208, pruned_loss=0.04273, over 4715.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2393, pruned_loss=0.0491, over 954329.61 frames. ], batch size: 59, lr: 3.18e-03, grad_scale: 32.0
+2023-04-27 15:58:20,430 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118925.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 15:58:39,680 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118945.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 15:58:51,396 INFO [finetune.py:976] (0/7) Epoch 21, batch 4400, loss[loss=0.2039, simple_loss=0.2674, pruned_loss=0.0702, over 4903.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2402, pruned_loss=0.04949, over 953702.21 frames. ], batch size: 43, lr: 3.18e-03, grad_scale: 32.0
+2023-04-27 15:59:00,776 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.017e+02 1.550e+02 1.866e+02 2.301e+02 5.364e+02, threshold=3.732e+02, percent-clipped=5.0
+2023-04-27 15:59:12,422 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9404, 1.3277, 3.2692, 3.0123, 2.8979, 3.1922, 3.2177, 2.9038],
+ device='cuda:0'), covar=tensor([0.7655, 0.5367, 0.1547, 0.2435, 0.1750, 0.2431, 0.1650, 0.1913],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0306, 0.0404, 0.0403, 0.0346, 0.0408, 0.0310, 0.0365],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 15:59:14,264 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=118973.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 15:59:21,739 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.2461, 4.1741, 2.9241, 4.9107, 4.2709, 4.2403, 1.8850, 4.2618],
+ device='cuda:0'), covar=tensor([0.1767, 0.1051, 0.3990, 0.0975, 0.2874, 0.1756, 0.5589, 0.2148],
+ device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0217, 0.0253, 0.0307, 0.0298, 0.0247, 0.0278, 0.0273],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 15:59:35,157 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=118985.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 15:59:36,312 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=118987.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 15:59:45,370 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=118993.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 15:59:52,142 INFO [finetune.py:976] (0/7) Epoch 21, batch 4450, loss[loss=0.1994, simple_loss=0.2748, pruned_loss=0.06203, over 4907.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2436, pruned_loss=0.05028, over 952415.26 frames. ], batch size: 43, lr: 3.18e-03, grad_scale: 32.0
+2023-04-27 15:59:59,824 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0
+2023-04-27 16:00:11,985 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=119033.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:00:25,716 INFO [finetune.py:976] (0/7) Epoch 21, batch 4500, loss[loss=0.2095, simple_loss=0.2781, pruned_loss=0.07044, over 4891.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2451, pruned_loss=0.05102, over 951707.90 frames. ], batch size: 43, lr: 3.18e-03, grad_scale: 32.0
+2023-04-27 16:00:30,591 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.788e+02 2.087e+02 2.542e+02 6.471e+02, threshold=4.174e+02, percent-clipped=4.0
+2023-04-27 16:00:39,545 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119075.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:00:44,937 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0
+2023-04-27 16:00:58,648 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119093.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:01:15,262 INFO [finetune.py:976] (0/7) Epoch 21, batch 4550, loss[loss=0.1946, simple_loss=0.2679, pruned_loss=0.06067, over 4935.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2464, pruned_loss=0.05137, over 951472.49 frames. ], batch size: 41, lr: 3.18e-03, grad_scale: 32.0
+2023-04-27 16:01:53,190 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9450, 2.3016, 1.9413, 2.2122, 1.4853, 1.8227, 1.9881, 1.4619],
+ device='cuda:0'), covar=tensor([0.1755, 0.1128, 0.0905, 0.1160, 0.3745, 0.1272, 0.1726, 0.2669],
+ device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0296, 0.0213, 0.0274, 0.0309, 0.0251, 0.0244, 0.0261],
+ device='cuda:0'), out_proj_covar=tensor([1.1309e-04, 1.1721e-04, 8.4297e-05, 1.0865e-04, 1.2543e-04, 9.9553e-05,
+ 9.8597e-05, 1.0348e-04], device='cuda:0')
+2023-04-27 16:01:53,806 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119136.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:02:16,872 INFO [finetune.py:976] (0/7) Epoch 21, batch 4600, loss[loss=0.1547, simple_loss=0.2201, pruned_loss=0.04461, over 4805.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2456, pruned_loss=0.05072, over 952867.43 frames. ], batch size: 40, lr: 3.18e-03, grad_scale: 32.0
+2023-04-27 16:02:17,008 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119154.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 16:02:21,811 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.477e+02 1.832e+02 2.213e+02 3.294e+02, threshold=3.663e+02, percent-clipped=0.0
+2023-04-27 16:02:46,173 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.75 vs. limit=5.0
+2023-04-27 16:02:49,206 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6620, 1.7046, 0.6194, 1.3289, 1.9328, 1.5388, 1.4516, 1.5022],
+ device='cuda:0'), covar=tensor([0.0459, 0.0369, 0.0355, 0.0546, 0.0260, 0.0505, 0.0469, 0.0550],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051],
+ device='cuda:0')
+2023-04-27 16:02:50,908 INFO [finetune.py:976] (0/7) Epoch 21, batch 4650, loss[loss=0.1964, simple_loss=0.2565, pruned_loss=0.0682, over 4825.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.243, pruned_loss=0.05005, over 953566.81 frames. ], batch size: 30, lr: 3.18e-03, grad_scale: 32.0
+2023-04-27 16:02:55,938 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-04-27 16:03:14,891 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7517, 1.8662, 0.9589, 1.4556, 1.8402, 1.6048, 1.5367, 1.6175],
+ device='cuda:0'), covar=tensor([0.0506, 0.0352, 0.0333, 0.0545, 0.0267, 0.0493, 0.0494, 0.0567],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0029, 0.0019, 0.0028, 0.0028, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051],
+ device='cuda:0')
+2023-04-27 16:03:24,687 INFO [finetune.py:976] (0/7) Epoch 21, batch 4700, loss[loss=0.1678, simple_loss=0.2297, pruned_loss=0.05298, over 4832.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2404, pruned_loss=0.04925, over 956612.07 frames. ], batch size: 33, lr: 3.18e-03, grad_scale: 32.0
+2023-04-27 16:03:29,613 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.111e+02 1.611e+02 1.939e+02 2.351e+02 3.791e+02, threshold=3.879e+02, percent-clipped=1.0
+2023-04-27 16:03:46,014 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119287.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:03:48,911 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2094, 2.5345, 0.8491, 1.5041, 1.5722, 1.8906, 1.5776, 0.8525],
+ device='cuda:0'), covar=tensor([0.1491, 0.1119, 0.1810, 0.1321, 0.1149, 0.0955, 0.1684, 0.1840],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0239, 0.0136, 0.0119, 0.0133, 0.0152, 0.0116, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 16:03:58,705 INFO [finetune.py:976] (0/7) Epoch 21, batch 4750, loss[loss=0.1825, simple_loss=0.2557, pruned_loss=0.05467, over 4741.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.237, pruned_loss=0.04767, over 953889.75 frames. ], batch size: 54, lr: 3.17e-03, grad_scale: 32.0
+2023-04-27 16:04:07,786 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4983, 2.4963, 1.8543, 2.0947, 2.5764, 2.0008, 3.2284, 1.7416],
+ device='cuda:0'), covar=tensor([0.3687, 0.2278, 0.4699, 0.3589, 0.1746, 0.2904, 0.1960, 0.4723],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0350, 0.0425, 0.0354, 0.0381, 0.0375, 0.0368, 0.0417],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:04:39,488 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=119335.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:04:54,473 INFO [finetune.py:976] (0/7) Epoch 21, batch 4800, loss[loss=0.1314, simple_loss=0.2061, pruned_loss=0.02832, over 4722.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2394, pruned_loss=0.04875, over 954884.17 frames. ], batch size: 23, lr: 3.17e-03, grad_scale: 32.0
+2023-04-27 16:04:59,370 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.005e+02 1.524e+02 1.812e+02 2.153e+02 4.144e+02, threshold=3.624e+02, percent-clipped=1.0
+2023-04-27 16:05:26,790 INFO [finetune.py:976] (0/7) Epoch 21, batch 4850, loss[loss=0.1587, simple_loss=0.2397, pruned_loss=0.03883, over 4831.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2441, pruned_loss=0.05027, over 955646.57 frames. ], batch size: 47, lr: 3.17e-03, grad_scale: 32.0
+2023-04-27 16:05:43,545 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119431.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:05:55,507 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119449.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 16:05:58,988 INFO [finetune.py:976] (0/7) Epoch 21, batch 4900, loss[loss=0.198, simple_loss=0.2542, pruned_loss=0.07089, over 4165.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2456, pruned_loss=0.05083, over 953599.57 frames. ], batch size: 18, lr: 3.17e-03, grad_scale: 32.0
+2023-04-27 16:06:04,797 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.637e+02 1.876e+02 2.288e+02 4.482e+02, threshold=3.752e+02, percent-clipped=1.0
+2023-04-27 16:06:35,841 INFO [finetune.py:976] (0/7) Epoch 21, batch 4950, loss[loss=0.1874, simple_loss=0.2579, pruned_loss=0.05845, over 4892.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2463, pruned_loss=0.05026, over 953678.17 frames. ], batch size: 35, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:06:49,591 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119514.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:07:11,840 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8398, 1.2128, 4.8308, 4.5363, 4.1445, 4.6535, 4.2888, 4.2450],
+ device='cuda:0'), covar=tensor([0.6731, 0.6336, 0.0910, 0.1566, 0.1107, 0.1136, 0.1795, 0.1447],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0304, 0.0403, 0.0402, 0.0345, 0.0406, 0.0308, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:07:42,892 INFO [finetune.py:976] (0/7) Epoch 21, batch 5000, loss[loss=0.1423, simple_loss=0.2063, pruned_loss=0.03915, over 4891.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2441, pruned_loss=0.04956, over 953149.96 frames. ], batch size: 32, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:07:44,859 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2495, 2.8871, 2.3340, 2.3523, 1.7529, 1.7559, 2.4136, 1.7757],
+ device='cuda:0'), covar=tensor([0.1401, 0.1289, 0.1185, 0.1348, 0.2033, 0.1736, 0.0847, 0.1671],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0211, 0.0168, 0.0203, 0.0199, 0.0184, 0.0155, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 16:07:56,239 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.721e+01 1.533e+02 1.932e+02 2.312e+02 5.244e+02, threshold=3.864e+02, percent-clipped=4.0
+2023-04-27 16:08:15,219 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119575.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:08:44,470 INFO [finetune.py:976] (0/7) Epoch 21, batch 5050, loss[loss=0.1799, simple_loss=0.2484, pruned_loss=0.05567, over 4887.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2421, pruned_loss=0.0492, over 951596.19 frames. ], batch size: 35, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:08:56,763 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.6586, 4.5240, 3.2512, 5.3577, 4.6924, 4.6928, 1.9757, 4.5785],
+ device='cuda:0'), covar=tensor([0.1645, 0.0952, 0.3155, 0.0837, 0.2370, 0.1453, 0.5959, 0.2062],
+ device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0216, 0.0253, 0.0305, 0.0296, 0.0246, 0.0276, 0.0273],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 16:09:28,917 INFO [finetune.py:976] (0/7) Epoch 21, batch 5100, loss[loss=0.166, simple_loss=0.2328, pruned_loss=0.04954, over 4764.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2383, pruned_loss=0.04792, over 952955.68 frames. ], batch size: 27, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:09:41,675 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.943e+01 1.499e+02 1.766e+02 2.161e+02 3.760e+02, threshold=3.532e+02, percent-clipped=0.0
+2023-04-27 16:09:43,039 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=119665.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:10:20,951 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0
+2023-04-27 16:10:31,240 INFO [finetune.py:976] (0/7) Epoch 21, batch 5150, loss[loss=0.1669, simple_loss=0.2443, pruned_loss=0.04469, over 4764.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2377, pruned_loss=0.04768, over 950960.49 frames. ], batch size: 28, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:11:05,056 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=119726.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:11:08,008 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119731.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:11:17,118 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0183, 1.7230, 2.1961, 2.4976, 2.0839, 1.9828, 2.0970, 2.0842],
+ device='cuda:0'), covar=tensor([0.4897, 0.7439, 0.7603, 0.6119, 0.6436, 0.8623, 0.8968, 0.9899],
+ device='cuda:0'), in_proj_covar=tensor([0.0427, 0.0410, 0.0501, 0.0501, 0.0456, 0.0484, 0.0492, 0.0500],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:11:30,488 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=119749.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 16:11:33,436 INFO [finetune.py:976] (0/7) Epoch 21, batch 5200, loss[loss=0.1921, simple_loss=0.265, pruned_loss=0.0596, over 4814.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2411, pruned_loss=0.04824, over 953086.20 frames. ], batch size: 40, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:11:39,368 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.908e+01 1.576e+02 2.065e+02 2.326e+02 4.790e+02, threshold=4.130e+02, percent-clipped=1.0
+2023-04-27 16:11:50,732 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0
+2023-04-27 16:11:51,109 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=119779.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:12:02,197 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=119797.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:12:06,570 INFO [finetune.py:976] (0/7) Epoch 21, batch 5250, loss[loss=0.183, simple_loss=0.2575, pruned_loss=0.05425, over 4919.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2428, pruned_loss=0.04857, over 950089.21 frames. ], batch size: 36, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:12:51,200 INFO [finetune.py:976] (0/7) Epoch 21, batch 5300, loss[loss=0.2222, simple_loss=0.2984, pruned_loss=0.07297, over 4891.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2466, pruned_loss=0.05021, over 951086.60 frames. ], batch size: 36, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:13:00,130 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.89 vs. limit=5.0
+2023-04-27 16:13:02,330 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.110e+02 1.628e+02 1.910e+02 2.445e+02 5.972e+02, threshold=3.820e+02, percent-clipped=3.0
+2023-04-27 16:13:13,808 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=119870.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:13:53,928 INFO [finetune.py:976] (0/7) Epoch 21, batch 5350, loss[loss=0.1858, simple_loss=0.245, pruned_loss=0.06326, over 4792.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2467, pruned_loss=0.05027, over 950906.80 frames. ], batch size: 25, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:14:17,478 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2206, 1.9580, 2.4007, 2.7945, 2.2507, 2.1470, 2.3316, 2.2305],
+ device='cuda:0'), covar=tensor([0.4728, 0.7152, 0.7152, 0.5418, 0.7073, 0.8807, 0.8304, 0.9342],
+ device='cuda:0'), in_proj_covar=tensor([0.0429, 0.0412, 0.0503, 0.0503, 0.0457, 0.0487, 0.0494, 0.0502],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:14:43,494 INFO [finetune.py:976] (0/7) Epoch 21, batch 5400, loss[loss=0.1554, simple_loss=0.2298, pruned_loss=0.04047, over 4872.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2431, pruned_loss=0.04895, over 951520.30 frames. ], batch size: 34, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:14:48,952 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.397e+02 1.683e+02 2.017e+02 3.693e+02, threshold=3.366e+02, percent-clipped=0.0
+2023-04-27 16:14:58,556 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0618, 2.0996, 1.9852, 1.6848, 2.2699, 1.6770, 2.8457, 1.7263],
+ device='cuda:0'), covar=tensor([0.3085, 0.1770, 0.4025, 0.2624, 0.1354, 0.2492, 0.0947, 0.4054],
+ device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0352, 0.0430, 0.0356, 0.0385, 0.0379, 0.0372, 0.0422],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:15:14,482 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-120000.pt
+2023-04-27 16:15:18,156 INFO [finetune.py:976] (0/7) Epoch 21, batch 5450, loss[loss=0.1732, simple_loss=0.2474, pruned_loss=0.04949, over 4859.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2398, pruned_loss=0.04849, over 950627.01 frames. ], batch size: 49, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:15:28,588 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=120021.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:16:03,188 INFO [finetune.py:976] (0/7) Epoch 21, batch 5500, loss[loss=0.2268, simple_loss=0.2955, pruned_loss=0.07907, over 4349.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2371, pruned_loss=0.04765, over 948592.92 frames. ], batch size: 65, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:16:14,181 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.019e+02 1.665e+02 1.823e+02 2.184e+02 3.265e+02, threshold=3.646e+02, percent-clipped=0.0
+2023-04-27 16:17:07,727 INFO [finetune.py:976] (0/7) Epoch 21, batch 5550, loss[loss=0.1708, simple_loss=0.2586, pruned_loss=0.04147, over 4863.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2396, pruned_loss=0.04857, over 947365.74 frames. ], batch size: 44, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:18:05,776 INFO [finetune.py:976] (0/7) Epoch 21, batch 5600, loss[loss=0.1698, simple_loss=0.2531, pruned_loss=0.04325, over 4926.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2437, pruned_loss=0.04992, over 947976.38 frames. ], batch size: 33, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:18:11,015 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.536e+02 1.871e+02 2.277e+02 4.229e+02, threshold=3.743e+02, percent-clipped=5.0
+2023-04-27 16:18:15,190 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=120170.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:18:15,850 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3101, 1.6754, 2.2103, 2.6781, 2.1551, 1.7131, 1.4313, 2.0154],
+ device='cuda:0'), covar=tensor([0.3002, 0.3113, 0.1606, 0.2097, 0.2477, 0.2568, 0.3927, 0.1934],
+ device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0243, 0.0225, 0.0313, 0.0218, 0.0232, 0.0227, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 16:18:23,687 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6439, 3.9401, 0.8591, 2.1374, 1.9914, 2.6908, 2.2737, 0.8273],
+ device='cuda:0'), covar=tensor([0.1429, 0.0835, 0.2182, 0.1178, 0.1201, 0.1045, 0.1574, 0.2321],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0240, 0.0137, 0.0119, 0.0133, 0.0152, 0.0116, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 16:18:33,847 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-04-27 16:18:36,563 INFO [finetune.py:976] (0/7) Epoch 21, batch 5650, loss[loss=0.2085, simple_loss=0.2924, pruned_loss=0.06226, over 4931.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2475, pruned_loss=0.05105, over 947483.63 frames. ], batch size: 38, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:18:45,335 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=120218.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:19:30,419 INFO [finetune.py:976] (0/7) Epoch 21, batch 5700, loss[loss=0.159, simple_loss=0.2109, pruned_loss=0.05353, over 4183.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2431, pruned_loss=0.05037, over 929473.21 frames. ], batch size: 18, lr: 3.17e-03, grad_scale: 16.0
+2023-04-27 16:19:40,582 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1060, 1.6711, 2.2042, 2.4892, 2.2119, 2.1427, 2.1569, 2.0609],
+ device='cuda:0'), covar=tensor([0.4320, 0.6034, 0.5490, 0.5328, 0.5409, 0.6767, 0.7303, 0.7732],
+ device='cuda:0'), in_proj_covar=tensor([0.0430, 0.0412, 0.0504, 0.0505, 0.0458, 0.0488, 0.0495, 0.0502],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:19:41,633 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.444e+02 1.777e+02 2.267e+02 3.469e+02, threshold=3.555e+02, percent-clipped=0.0
+2023-04-27 16:19:58,945 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-21.pt
+2023-04-27 16:20:17,302 INFO [finetune.py:976] (0/7) Epoch 22, batch 0, loss[loss=0.1855, simple_loss=0.257, pruned_loss=0.05701, over 4838.00 frames. ], tot_loss[loss=0.1855, simple_loss=0.257, pruned_loss=0.05701, over 4838.00 frames. ], batch size: 47, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:20:17,303 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 16:20:33,673 INFO [finetune.py:1010] (0/7) Epoch 22, validation: loss=0.1546, simple_loss=0.2251, pruned_loss=0.04204, over 2265189.00 frames.
+2023-04-27 16:20:33,674 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 16:20:37,879 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0952, 1.3596, 1.6600, 2.5244, 2.5463, 1.8999, 1.6263, 2.1973],
+ device='cuda:0'), covar=tensor([0.0963, 0.1765, 0.1079, 0.0599, 0.0602, 0.1129, 0.0963, 0.0670],
+ device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0201, 0.0183, 0.0174, 0.0177, 0.0180, 0.0152, 0.0178],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:20:59,171 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=120321.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:21:06,360 INFO [finetune.py:976] (0/7) Epoch 22, batch 50, loss[loss=0.1701, simple_loss=0.2383, pruned_loss=0.05092, over 4922.00 frames. ], tot_loss[loss=0.1787, simple_loss=0.2495, pruned_loss=0.05393, over 215914.27 frames. ], batch size: 33, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:21:27,311 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.523e+01 1.622e+02 1.941e+02 2.362e+02 4.057e+02, threshold=3.882e+02, percent-clipped=2.0
+2023-04-27 16:21:31,095 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=120369.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:21:51,383 INFO [finetune.py:976] (0/7) Epoch 22, batch 100, loss[loss=0.1859, simple_loss=0.25, pruned_loss=0.06086, over 4918.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2425, pruned_loss=0.05101, over 380376.37 frames. ], batch size: 33, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:22:05,041 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8491, 2.1093, 2.0802, 2.2434, 1.9725, 2.1560, 2.0966, 2.0839],
+ device='cuda:0'), covar=tensor([0.4140, 0.5945, 0.4721, 0.4429, 0.5857, 0.7070, 0.6556, 0.5676],
+ device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0374, 0.0323, 0.0338, 0.0347, 0.0394, 0.0358, 0.0330],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 16:22:55,501 INFO [finetune.py:976] (0/7) Epoch 22, batch 150, loss[loss=0.1769, simple_loss=0.2221, pruned_loss=0.06583, over 4033.00 frames. ], tot_loss[loss=0.169, simple_loss=0.238, pruned_loss=0.05001, over 504493.26 frames. ], batch size: 17, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:22:56,177 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8980, 2.8256, 2.3222, 3.2960, 2.9224, 2.9094, 1.2074, 2.8243],
+ device='cuda:0'), covar=tensor([0.2254, 0.1843, 0.3065, 0.3019, 0.3332, 0.2200, 0.5589, 0.2863],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0214, 0.0249, 0.0302, 0.0294, 0.0244, 0.0273, 0.0271],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 16:23:29,004 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.065e+02 1.459e+02 1.791e+02 2.121e+02 3.336e+02, threshold=3.582e+02, percent-clipped=0.0
+2023-04-27 16:23:57,906 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-04-27 16:23:58,242 INFO [finetune.py:976] (0/7) Epoch 22, batch 200, loss[loss=0.1542, simple_loss=0.2355, pruned_loss=0.03646, over 4909.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.236, pruned_loss=0.04934, over 605964.77 frames. ], batch size: 37, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:25:06,005 INFO [finetune.py:976] (0/7) Epoch 22, batch 250, loss[loss=0.1917, simple_loss=0.2706, pruned_loss=0.05643, over 4815.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2404, pruned_loss=0.05027, over 685333.73 frames. ], batch size: 38, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:25:37,602 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-27 16:25:49,442 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.595e+02 1.873e+02 2.274e+02 4.484e+02, threshold=3.746e+02, percent-clipped=2.0
+2023-04-27 16:26:12,261 INFO [finetune.py:976] (0/7) Epoch 22, batch 300, loss[loss=0.1544, simple_loss=0.2307, pruned_loss=0.039, over 4923.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2439, pruned_loss=0.05078, over 745538.83 frames. ], batch size: 33, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:27:06,864 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6480, 1.6721, 1.6204, 1.2619, 1.7814, 1.4928, 2.2190, 1.4130],
+ device='cuda:0'), covar=tensor([0.3691, 0.1920, 0.5068, 0.2962, 0.1524, 0.2248, 0.1545, 0.4682],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0351, 0.0427, 0.0354, 0.0382, 0.0374, 0.0369, 0.0419],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:27:19,462 INFO [finetune.py:976] (0/7) Epoch 22, batch 350, loss[loss=0.1325, simple_loss=0.2086, pruned_loss=0.02823, over 4771.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2458, pruned_loss=0.05157, over 792218.89 frames. ], batch size: 27, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:28:01,876 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.016e+02 1.528e+02 1.802e+02 2.377e+02 4.881e+02, threshold=3.603e+02, percent-clipped=3.0
+2023-04-27 16:28:20,628 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=120676.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:28:24,207 INFO [finetune.py:976] (0/7) Epoch 22, batch 400, loss[loss=0.1575, simple_loss=0.2337, pruned_loss=0.04063, over 4773.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2468, pruned_loss=0.05145, over 828449.74 frames. ], batch size: 51, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:28:44,122 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7968, 2.4928, 1.7046, 1.8109, 1.3200, 1.3861, 1.7543, 1.2569],
+ device='cuda:0'), covar=tensor([0.2121, 0.1474, 0.1766, 0.1923, 0.2766, 0.2437, 0.1123, 0.2279],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0209, 0.0168, 0.0202, 0.0198, 0.0184, 0.0154, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 16:29:32,132 INFO [finetune.py:976] (0/7) Epoch 22, batch 450, loss[loss=0.1573, simple_loss=0.2287, pruned_loss=0.04298, over 4876.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2452, pruned_loss=0.05022, over 857669.44 frames. ], batch size: 31, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:29:40,495 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=120737.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:29:56,711 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5632, 2.2097, 2.5502, 2.9017, 2.8819, 2.4253, 2.0806, 2.5561],
+ device='cuda:0'), covar=tensor([0.0786, 0.0967, 0.0584, 0.0515, 0.0591, 0.0906, 0.0725, 0.0533],
+ device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0198, 0.0181, 0.0172, 0.0175, 0.0177, 0.0149, 0.0175],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:30:03,216 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4155, 1.7379, 1.8945, 1.9897, 1.8373, 1.8758, 1.9530, 1.9432],
+ device='cuda:0'), covar=tensor([0.4492, 0.5324, 0.4245, 0.4224, 0.5351, 0.7022, 0.5024, 0.4654],
+ device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0372, 0.0323, 0.0337, 0.0347, 0.0394, 0.0356, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 16:30:03,663 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.146e+02 1.511e+02 1.855e+02 2.296e+02 3.408e+02, threshold=3.711e+02, percent-clipped=0.0
+2023-04-27 16:30:15,270 INFO [finetune.py:976] (0/7) Epoch 22, batch 500, loss[loss=0.1455, simple_loss=0.2075, pruned_loss=0.04178, over 4821.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2434, pruned_loss=0.05006, over 879030.66 frames. ], batch size: 41, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:30:22,587 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7065, 1.0144, 1.3228, 1.3230, 1.7360, 1.4296, 1.1047, 1.2652],
+ device='cuda:0'), covar=tensor([0.1980, 0.2060, 0.2161, 0.1735, 0.1191, 0.1947, 0.2959, 0.2941],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0310, 0.0349, 0.0287, 0.0324, 0.0308, 0.0300, 0.0370],
+ device='cuda:0'), out_proj_covar=tensor([6.3866e-05, 6.4161e-05, 7.3787e-05, 5.7746e-05, 6.6868e-05, 6.4541e-05,
+ 6.2753e-05, 7.8634e-05], device='cuda:0')
+2023-04-27 16:30:28,274 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. limit=2.0
+2023-04-27 16:30:49,308 INFO [finetune.py:976] (0/7) Epoch 22, batch 550, loss[loss=0.1879, simple_loss=0.257, pruned_loss=0.0594, over 4793.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2413, pruned_loss=0.04979, over 895843.66 frames. ], batch size: 29, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:30:59,016 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9060, 1.1941, 4.6612, 4.3965, 3.9808, 4.4129, 4.1473, 4.0464],
+ device='cuda:0'), covar=tensor([0.7143, 0.6534, 0.1030, 0.1622, 0.1113, 0.1902, 0.2248, 0.1678],
+ device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0307, 0.0409, 0.0408, 0.0350, 0.0412, 0.0314, 0.0369],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:31:15,724 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.143e+02 1.573e+02 1.876e+02 2.291e+02 4.467e+02, threshold=3.751e+02, percent-clipped=2.0
+2023-04-27 16:31:38,950 INFO [finetune.py:976] (0/7) Epoch 22, batch 600, loss[loss=0.1983, simple_loss=0.2677, pruned_loss=0.0644, over 4907.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.241, pruned_loss=0.04981, over 907622.70 frames. ], batch size: 46, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:32:45,425 INFO [finetune.py:976] (0/7) Epoch 22, batch 650, loss[loss=0.2214, simple_loss=0.282, pruned_loss=0.08041, over 4893.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2444, pruned_loss=0.05067, over 918775.40 frames. ], batch size: 32, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:33:26,706 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.656e+02 1.853e+02 2.297e+02 3.999e+02, threshold=3.705e+02, percent-clipped=2.0
+2023-04-27 16:33:52,002 INFO [finetune.py:976] (0/7) Epoch 22, batch 700, loss[loss=0.1763, simple_loss=0.2513, pruned_loss=0.05061, over 4825.00 frames. ], tot_loss[loss=0.1752, simple_loss=0.2463, pruned_loss=0.05209, over 924594.26 frames. ], batch size: 39, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:34:21,031 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121003.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:34:45,577 INFO [finetune.py:976] (0/7) Epoch 22, batch 750, loss[loss=0.1654, simple_loss=0.2452, pruned_loss=0.04279, over 4780.00 frames. ], tot_loss[loss=0.1773, simple_loss=0.2483, pruned_loss=0.05317, over 930971.14 frames. ], batch size: 25, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:34:45,650 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121032.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:34:48,216 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3607, 1.2359, 1.5581, 1.5826, 1.2686, 1.1416, 1.3612, 0.8573],
+ device='cuda:0'), covar=tensor([0.0548, 0.0593, 0.0396, 0.0495, 0.0673, 0.0969, 0.0544, 0.0532],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0068, 0.0066, 0.0067, 0.0074, 0.0096, 0.0072, 0.0064],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 16:35:00,243 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3105, 1.7271, 2.2483, 2.5817, 2.2378, 1.8191, 1.4542, 1.9608],
+ device='cuda:0'), covar=tensor([0.3480, 0.3176, 0.1653, 0.2396, 0.2570, 0.2710, 0.4041, 0.1954],
+ device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0244, 0.0226, 0.0313, 0.0220, 0.0233, 0.0228, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 16:35:03,871 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0
+2023-04-27 16:35:04,853 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.944e+01 1.522e+02 1.865e+02 2.464e+02 7.582e+02, threshold=3.731e+02, percent-clipped=3.0
+2023-04-27 16:35:05,575 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121064.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:35:15,315 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-27 16:35:19,167 INFO [finetune.py:976] (0/7) Epoch 22, batch 800, loss[loss=0.1284, simple_loss=0.1997, pruned_loss=0.02849, over 4812.00 frames. ], tot_loss[loss=0.1751, simple_loss=0.2468, pruned_loss=0.05171, over 936028.35 frames. ], batch size: 25, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:35:37,495 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1726, 2.7387, 1.2084, 1.5436, 2.3440, 1.3395, 3.7495, 2.0738],
+ device='cuda:0'), covar=tensor([0.0684, 0.0634, 0.0882, 0.1245, 0.0477, 0.1019, 0.0205, 0.0611],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0052, 0.0073, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 16:35:52,465 INFO [finetune.py:976] (0/7) Epoch 22, batch 850, loss[loss=0.1677, simple_loss=0.2199, pruned_loss=0.05774, over 4007.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2442, pruned_loss=0.05091, over 940111.73 frames. ], batch size: 17, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:36:11,704 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.009e+02 1.493e+02 1.782e+02 2.235e+02 3.771e+02, threshold=3.565e+02, percent-clipped=1.0
+2023-04-27 16:36:12,477 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121164.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:36:25,801 INFO [finetune.py:976] (0/7) Epoch 22, batch 900, loss[loss=0.1969, simple_loss=0.2624, pruned_loss=0.06568, over 4909.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2433, pruned_loss=0.05087, over 943863.35 frames. ], batch size: 46, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:36:34,903 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2614, 2.9610, 0.8928, 1.7353, 1.6657, 2.1205, 1.7547, 0.9586],
+ device='cuda:0'), covar=tensor([0.1343, 0.0970, 0.1823, 0.1184, 0.1114, 0.0964, 0.1373, 0.1889],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0241, 0.0138, 0.0119, 0.0133, 0.0153, 0.0116, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 16:36:51,858 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5409, 3.3213, 0.8866, 1.7766, 1.8090, 2.4023, 1.8490, 1.0774],
+ device='cuda:0'), covar=tensor([0.1443, 0.1002, 0.2176, 0.1352, 0.1148, 0.1067, 0.1553, 0.2135],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0242, 0.0138, 0.0120, 0.0133, 0.0154, 0.0117, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 16:36:52,966 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121225.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:36:59,097 INFO [finetune.py:976] (0/7) Epoch 22, batch 950, loss[loss=0.1644, simple_loss=0.2442, pruned_loss=0.04232, over 4768.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2421, pruned_loss=0.05051, over 946299.64 frames. ], batch size: 28, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:37:35,641 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.216e+01 1.432e+02 1.893e+02 2.215e+02 5.796e+02, threshold=3.785e+02, percent-clipped=6.0
+2023-04-27 16:38:01,698 INFO [finetune.py:976] (0/7) Epoch 22, batch 1000, loss[loss=0.1762, simple_loss=0.2396, pruned_loss=0.05638, over 4227.00 frames. ], tot_loss[loss=0.1734, simple_loss=0.2442, pruned_loss=0.05134, over 947587.91 frames. ], batch size: 18, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:38:41,530 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121311.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 16:39:01,170 INFO [finetune.py:976] (0/7) Epoch 22, batch 1050, loss[loss=0.1651, simple_loss=0.2435, pruned_loss=0.04334, over 4823.00 frames. ], tot_loss[loss=0.1764, simple_loss=0.2476, pruned_loss=0.0526, over 949235.43 frames. ], batch size: 33, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:39:01,276 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121332.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:39:05,894 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6959, 2.1280, 1.7967, 1.5953, 1.2746, 1.3135, 1.9471, 1.2510],
+ device='cuda:0'), covar=tensor([0.1667, 0.1501, 0.1409, 0.1791, 0.2366, 0.1902, 0.0913, 0.2069],
+ device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0208, 0.0167, 0.0201, 0.0198, 0.0184, 0.0154, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 16:39:18,516 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121359.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:39:20,860 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.274e+02 1.706e+02 2.101e+02 2.455e+02 4.347e+02, threshold=4.202e+02, percent-clipped=2.0
+2023-04-27 16:39:26,488 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-04-27 16:39:26,934 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121372.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 16:39:31,739 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=121380.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:39:33,388 INFO [finetune.py:976] (0/7) Epoch 22, batch 1100, loss[loss=0.1964, simple_loss=0.2769, pruned_loss=0.05793, over 4808.00 frames. ], tot_loss[loss=0.1748, simple_loss=0.2464, pruned_loss=0.05157, over 950577.08 frames. ], batch size: 39, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:39:50,923 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121407.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:40:06,344 INFO [finetune.py:976] (0/7) Epoch 22, batch 1150, loss[loss=0.2127, simple_loss=0.2744, pruned_loss=0.07552, over 4812.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2473, pruned_loss=0.05131, over 951111.00 frames. ], batch size: 39, lr: 3.16e-03, grad_scale: 16.0
+2023-04-27 16:40:27,721 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.622e+02 1.904e+02 2.379e+02 4.461e+02, threshold=3.808e+02, percent-clipped=1.0
+2023-04-27 16:40:30,894 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=121468.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:40:39,780 INFO [finetune.py:976] (0/7) Epoch 22, batch 1200, loss[loss=0.1439, simple_loss=0.2273, pruned_loss=0.0303, over 4804.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2452, pruned_loss=0.05027, over 948847.94 frames. ], batch size: 25, lr: 3.15e-03, grad_scale: 16.0
+2023-04-27 16:41:05,076 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121520.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:41:12,819 INFO [finetune.py:976] (0/7) Epoch 22, batch 1250, loss[loss=0.1886, simple_loss=0.2591, pruned_loss=0.05909, over 4852.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2425, pruned_loss=0.0495, over 950187.84 frames. ], batch size: 44, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:41:34,045 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.460e+02 1.755e+02 2.215e+02 4.561e+02, threshold=3.510e+02, percent-clipped=2.0
+2023-04-27 16:41:46,168 INFO [finetune.py:976] (0/7) Epoch 22, batch 1300, loss[loss=0.1334, simple_loss=0.2033, pruned_loss=0.03175, over 4789.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2409, pruned_loss=0.04934, over 953080.65 frames. ], batch size: 29, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:42:01,530 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3568, 1.6506, 1.8258, 1.9578, 1.7936, 1.8982, 1.8380, 1.8936],
+ device='cuda:0'), covar=tensor([0.3982, 0.5346, 0.4374, 0.3917, 0.5276, 0.7437, 0.4886, 0.4537],
+ device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0375, 0.0326, 0.0340, 0.0348, 0.0396, 0.0358, 0.0331],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 16:42:19,081 INFO [finetune.py:976] (0/7) Epoch 22, batch 1350, loss[loss=0.1933, simple_loss=0.2682, pruned_loss=0.05916, over 4911.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2417, pruned_loss=0.04995, over 954138.07 frames. ], batch size: 37, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:42:55,952 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121659.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:42:58,251 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.587e+02 1.863e+02 2.182e+02 3.847e+02, threshold=3.726e+02, percent-clipped=2.0
+2023-04-27 16:43:04,720 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121667.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 16:43:14,557 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-27 16:43:19,352 INFO [finetune.py:976] (0/7) Epoch 22, batch 1400, loss[loss=0.1978, simple_loss=0.2716, pruned_loss=0.062, over 4904.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2436, pruned_loss=0.0503, over 955657.33 frames. ], batch size: 36, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:43:20,099 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7448, 2.4525, 1.8328, 1.7626, 1.2516, 1.2910, 2.0369, 1.2734],
+ device='cuda:0'), covar=tensor([0.1613, 0.1319, 0.1407, 0.1694, 0.2249, 0.1938, 0.0912, 0.2012],
+ device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0210, 0.0168, 0.0202, 0.0200, 0.0185, 0.0155, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 16:43:59,849 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=121707.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:44:24,395 INFO [finetune.py:976] (0/7) Epoch 22, batch 1450, loss[loss=0.1526, simple_loss=0.2393, pruned_loss=0.03294, over 4768.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2457, pruned_loss=0.05065, over 955352.27 frames. ], batch size: 28, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:45:04,294 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.748e+02 1.956e+02 2.570e+02 3.972e+02, threshold=3.912e+02, percent-clipped=2.0
+2023-04-27 16:45:04,381 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=121763.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:45:27,547 INFO [finetune.py:976] (0/7) Epoch 22, batch 1500, loss[loss=0.2026, simple_loss=0.2647, pruned_loss=0.07029, over 4857.00 frames. ], tot_loss[loss=0.1756, simple_loss=0.2479, pruned_loss=0.05164, over 955177.47 frames. ], batch size: 31, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:45:53,710 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-27 16:45:58,986 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121820.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:46:03,332 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2198, 1.6392, 2.1053, 2.4965, 2.0670, 1.6588, 1.4473, 1.8002],
+ device='cuda:0'), covar=tensor([0.3315, 0.3165, 0.1662, 0.2209, 0.2669, 0.2679, 0.4034, 0.2200],
+ device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0243, 0.0226, 0.0313, 0.0220, 0.0233, 0.0227, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 16:46:06,250 INFO [finetune.py:976] (0/7) Epoch 22, batch 1550, loss[loss=0.157, simple_loss=0.2406, pruned_loss=0.03674, over 4855.00 frames. ], tot_loss[loss=0.1755, simple_loss=0.2479, pruned_loss=0.05154, over 953735.77 frames. ], batch size: 34, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:46:28,045 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.267e+01 1.607e+02 1.890e+02 2.272e+02 5.935e+02, threshold=3.780e+02, percent-clipped=2.0
+2023-04-27 16:46:31,183 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=121868.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:46:39,811 INFO [finetune.py:976] (0/7) Epoch 22, batch 1600, loss[loss=0.1533, simple_loss=0.2109, pruned_loss=0.0479, over 4383.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2449, pruned_loss=0.05036, over 953584.18 frames. ], batch size: 19, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:46:47,828 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2207, 1.8678, 2.1627, 2.5644, 2.5058, 2.2164, 1.8567, 2.2986],
+ device='cuda:0'), covar=tensor([0.0793, 0.1125, 0.0687, 0.0591, 0.0546, 0.0778, 0.0754, 0.0535],
+ device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0200, 0.0182, 0.0173, 0.0176, 0.0178, 0.0151, 0.0176],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:47:07,145 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3584, 1.3857, 5.3872, 5.0200, 4.6910, 5.0764, 4.7172, 4.8373],
+ device='cuda:0'), covar=tensor([0.6237, 0.6142, 0.0957, 0.1669, 0.1052, 0.1347, 0.1004, 0.1557],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0307, 0.0406, 0.0406, 0.0346, 0.0409, 0.0311, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:47:13,760 INFO [finetune.py:976] (0/7) Epoch 22, batch 1650, loss[loss=0.1687, simple_loss=0.2328, pruned_loss=0.05226, over 4830.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2422, pruned_loss=0.04932, over 954867.32 frames. ], batch size: 25, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:47:24,846 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=121950.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:47:32,453 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9192, 1.3254, 1.6283, 1.5615, 1.7003, 1.3814, 0.8037, 1.2819],
+ device='cuda:0'), covar=tensor([0.3215, 0.3919, 0.1967, 0.2392, 0.2459, 0.2832, 0.4268, 0.2223],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0243, 0.0225, 0.0312, 0.0219, 0.0232, 0.0226, 0.0183],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 16:47:33,996 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.216e+02 1.538e+02 1.825e+02 2.150e+02 3.786e+02, threshold=3.649e+02, percent-clipped=1.0
+2023-04-27 16:47:35,201 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5733, 3.1787, 2.6188, 3.1427, 2.3861, 2.8473, 2.9574, 2.1614],
+ device='cuda:0'), covar=tensor([0.2106, 0.1200, 0.0891, 0.1192, 0.2937, 0.1137, 0.1849, 0.2696],
+ device='cuda:0'), in_proj_covar=tensor([0.0288, 0.0303, 0.0218, 0.0279, 0.0317, 0.0257, 0.0251, 0.0265],
+ device='cuda:0'), out_proj_covar=tensor([1.1518e-04, 1.1990e-04, 8.6175e-05, 1.1022e-04, 1.2824e-04, 1.0177e-04,
+ 1.0106e-04, 1.0497e-04], device='cuda:0')
+2023-04-27 16:47:37,515 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=121967.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 16:47:44,201 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8879, 1.1830, 3.2931, 3.0806, 2.9659, 3.2042, 3.2028, 2.9096],
+ device='cuda:0'), covar=tensor([0.7243, 0.5368, 0.1510, 0.2173, 0.1297, 0.2137, 0.1698, 0.1839],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0307, 0.0405, 0.0406, 0.0346, 0.0408, 0.0311, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:47:47,186 INFO [finetune.py:976] (0/7) Epoch 22, batch 1700, loss[loss=0.18, simple_loss=0.2474, pruned_loss=0.05635, over 4927.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2398, pruned_loss=0.04884, over 955307.71 frames. ], batch size: 38, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:47:50,904 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5021, 3.3218, 0.9908, 1.6800, 1.7427, 2.4541, 1.8990, 0.9728],
+ device='cuda:0'), covar=tensor([0.1420, 0.0990, 0.2030, 0.1339, 0.1202, 0.1023, 0.1544, 0.2127],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0240, 0.0137, 0.0120, 0.0132, 0.0152, 0.0116, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 16:47:58,269 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-122000.pt
+2023-04-27 16:48:06,913 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122011.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:48:15,574 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=122015.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 16:48:37,956 INFO [finetune.py:976] (0/7) Epoch 22, batch 1750, loss[loss=0.1558, simple_loss=0.2234, pruned_loss=0.04408, over 4759.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2414, pruned_loss=0.04916, over 953157.82 frames. ], batch size: 27, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:49:13,879 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.616e+02 1.845e+02 2.409e+02 4.396e+02, threshold=3.689e+02, percent-clipped=3.0
+2023-04-27 16:49:13,978 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122063.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:49:43,794 INFO [finetune.py:976] (0/7) Epoch 22, batch 1800, loss[loss=0.1862, simple_loss=0.261, pruned_loss=0.05569, over 4903.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2438, pruned_loss=0.04997, over 951521.16 frames. ], batch size: 36, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:50:18,107 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=122111.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:50:27,650 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122117.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:50:41,148 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0
+2023-04-27 16:50:49,863 INFO [finetune.py:976] (0/7) Epoch 22, batch 1850, loss[loss=0.153, simple_loss=0.2354, pruned_loss=0.03529, over 4815.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2449, pruned_loss=0.04971, over 953505.90 frames. ], batch size: 40, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:51:27,102 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.233e+01 1.753e+02 1.978e+02 2.407e+02 4.460e+02, threshold=3.956e+02, percent-clipped=4.0
+2023-04-27 16:51:48,069 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122178.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 16:51:56,333 INFO [finetune.py:976] (0/7) Epoch 22, batch 1900, loss[loss=0.1769, simple_loss=0.2467, pruned_loss=0.05351, over 4732.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2465, pruned_loss=0.04988, over 953642.43 frames. ], batch size: 59, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:52:10,095 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122195.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:52:40,530 INFO [finetune.py:976] (0/7) Epoch 22, batch 1950, loss[loss=0.2207, simple_loss=0.2711, pruned_loss=0.08516, over 4216.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2458, pruned_loss=0.05003, over 953913.10 frames. ], batch size: 65, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:52:41,959 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0
+2023-04-27 16:52:55,284 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122256.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:52:59,361 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.614e+02 1.828e+02 2.204e+02 4.650e+02, threshold=3.656e+02, percent-clipped=1.0
+2023-04-27 16:53:09,638 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6221, 2.3430, 2.6518, 3.1004, 2.8677, 2.6650, 2.2557, 2.6378],
+ device='cuda:0'), covar=tensor([0.0740, 0.0937, 0.0613, 0.0480, 0.0544, 0.0708, 0.0673, 0.0502],
+ device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0200, 0.0182, 0.0173, 0.0176, 0.0178, 0.0151, 0.0176],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:53:13,168 INFO [finetune.py:976] (0/7) Epoch 22, batch 2000, loss[loss=0.1848, simple_loss=0.2355, pruned_loss=0.06704, over 4829.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.243, pruned_loss=0.04923, over 953554.24 frames. ], batch size: 38, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:53:29,033 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122306.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:53:37,041 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122319.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:53:46,289 INFO [finetune.py:976] (0/7) Epoch 22, batch 2050, loss[loss=0.1484, simple_loss=0.2187, pruned_loss=0.03902, over 4912.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2391, pruned_loss=0.04783, over 953796.15 frames. ], batch size: 37, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:54:01,969 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4129, 1.6613, 1.8636, 1.9652, 1.8069, 1.9046, 1.9418, 1.9152],
+ device='cuda:0'), covar=tensor([0.3913, 0.5354, 0.4393, 0.4218, 0.5447, 0.6974, 0.4841, 0.4515],
+ device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0371, 0.0323, 0.0337, 0.0345, 0.0392, 0.0354, 0.0329],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 16:54:12,101 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.559e+02 1.860e+02 2.262e+02 4.767e+02, threshold=3.720e+02, percent-clipped=3.0
+2023-04-27 16:54:14,253 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0
+2023-04-27 16:54:35,064 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122380.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:54:36,179 INFO [finetune.py:976] (0/7) Epoch 22, batch 2100, loss[loss=0.1683, simple_loss=0.2453, pruned_loss=0.04569, over 4945.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2387, pruned_loss=0.04778, over 951903.08 frames. ], batch size: 33, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:55:29,615 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-04-27 16:55:40,953 INFO [finetune.py:976] (0/7) Epoch 22, batch 2150, loss[loss=0.1904, simple_loss=0.2447, pruned_loss=0.06805, over 4828.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.242, pruned_loss=0.04924, over 952133.28 frames. ], batch size: 30, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:55:49,327 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-27 16:56:03,590 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9012, 1.5515, 1.4976, 1.5876, 2.0545, 1.6631, 1.3837, 1.4215],
+ device='cuda:0'), covar=tensor([0.1590, 0.1536, 0.2072, 0.1493, 0.0904, 0.2025, 0.2117, 0.2386],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0310, 0.0350, 0.0289, 0.0325, 0.0308, 0.0299, 0.0371],
+ device='cuda:0'), out_proj_covar=tensor([6.3889e-05, 6.4215e-05, 7.3650e-05, 5.8155e-05, 6.6907e-05, 6.4523e-05,
+ 6.2587e-05, 7.8787e-05], device='cuda:0')
+2023-04-27 16:56:18,841 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3118, 2.7407, 2.3201, 2.8591, 2.0593, 2.5265, 2.6914, 1.8226],
+ device='cuda:0'), covar=tensor([0.1892, 0.1155, 0.0867, 0.1017, 0.3143, 0.1043, 0.1968, 0.2549],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0302, 0.0217, 0.0277, 0.0315, 0.0256, 0.0249, 0.0263],
+ device='cuda:0'), out_proj_covar=tensor([1.1403e-04, 1.1977e-04, 8.5842e-05, 1.0934e-04, 1.2751e-04, 1.0137e-04,
+ 1.0025e-04, 1.0394e-04], device='cuda:0')
+2023-04-27 16:56:19,327 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.667e+02 1.982e+02 2.424e+02 1.094e+03, threshold=3.964e+02, percent-clipped=3.0
+2023-04-27 16:56:22,553 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122468.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:56:30,964 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122473.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 16:56:32,334 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-04-27 16:56:42,042 INFO [finetune.py:976] (0/7) Epoch 22, batch 2200, loss[loss=0.1461, simple_loss=0.2223, pruned_loss=0.03492, over 3946.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.243, pruned_loss=0.04901, over 951419.85 frames. ], batch size: 17, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:56:43,395 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8078, 1.7518, 1.6347, 1.3772, 1.8585, 1.5894, 2.2671, 1.4983],
+ device='cuda:0'), covar=tensor([0.3686, 0.1776, 0.4858, 0.3054, 0.1613, 0.2372, 0.1660, 0.4699],
+ device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0350, 0.0427, 0.0354, 0.0382, 0.0376, 0.0368, 0.0420],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 16:57:06,864 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.99 vs. limit=2.0
+2023-04-27 16:57:48,023 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122529.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:57:49,741 INFO [finetune.py:976] (0/7) Epoch 22, batch 2250, loss[loss=0.1521, simple_loss=0.2288, pruned_loss=0.03772, over 4875.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2428, pruned_loss=0.04831, over 949419.74 frames. ], batch size: 34, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:57:59,927 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5017, 1.4396, 1.8510, 1.7684, 1.3551, 1.2518, 1.5630, 0.9966],
+ device='cuda:0'), covar=tensor([0.0522, 0.0576, 0.0360, 0.0617, 0.0706, 0.1159, 0.0543, 0.0615],
+ device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0067, 0.0066, 0.0067, 0.0074, 0.0095, 0.0072, 0.0064],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 16:58:19,793 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122551.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 16:58:34,009 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.195e+02 1.620e+02 1.922e+02 2.285e+02 4.968e+02, threshold=3.845e+02, percent-clipped=1.0
+2023-04-27 16:58:56,472 INFO [finetune.py:976] (0/7) Epoch 22, batch 2300, loss[loss=0.1368, simple_loss=0.2093, pruned_loss=0.03214, over 3283.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2428, pruned_loss=0.04779, over 950848.51 frames. ], batch size: 14, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 16:58:57,351 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.70 vs. limit=5.0
+2023-04-27 16:59:03,558 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-04-27 16:59:16,074 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9652, 2.5433, 2.0609, 2.0381, 1.3876, 1.4751, 2.0688, 1.3659],
+ device='cuda:0'), covar=tensor([0.1624, 0.1307, 0.1420, 0.1576, 0.2448, 0.1956, 0.0982, 0.2065],
+ device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0212, 0.0170, 0.0204, 0.0201, 0.0187, 0.0157, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 16:59:19,599 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0
+2023-04-27 16:59:36,025 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122606.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 17:00:02,689 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3831, 2.8015, 2.4146, 2.9121, 2.1921, 2.6655, 2.8023, 2.0267],
+ device='cuda:0'), covar=tensor([0.2090, 0.1336, 0.0822, 0.1208, 0.3158, 0.1179, 0.1824, 0.2976],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0304, 0.0218, 0.0277, 0.0315, 0.0256, 0.0248, 0.0263],
+ device='cuda:0'), out_proj_covar=tensor([1.1395e-04, 1.2038e-04, 8.5977e-05, 1.0956e-04, 1.2768e-04, 1.0139e-04,
+ 1.0017e-04, 1.0410e-04], device='cuda:0')
+2023-04-27 17:00:08,780 INFO [finetune.py:976] (0/7) Epoch 22, batch 2350, loss[loss=0.1647, simple_loss=0.2224, pruned_loss=0.0535, over 4823.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2411, pruned_loss=0.04752, over 953437.61 frames. ], batch size: 33, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 17:00:34,408 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1667, 1.4848, 1.3361, 1.7243, 1.5929, 1.6528, 1.3759, 2.9496],
+ device='cuda:0'), covar=tensor([0.0630, 0.0772, 0.0771, 0.1145, 0.0580, 0.0545, 0.0694, 0.0183],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 17:00:40,760 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=122654.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 17:00:46,226 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.405e+01 1.589e+02 1.908e+02 2.329e+02 5.605e+02, threshold=3.816e+02, percent-clipped=4.0
+2023-04-27 17:01:04,139 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122675.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 17:01:14,128 INFO [finetune.py:976] (0/7) Epoch 22, batch 2400, loss[loss=0.167, simple_loss=0.2338, pruned_loss=0.05011, over 4911.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2398, pruned_loss=0.04737, over 953972.73 frames. ], batch size: 43, lr: 3.15e-03, grad_scale: 32.0
+2023-04-27 17:02:00,019 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0
+2023-04-27 17:02:21,393 INFO [finetune.py:976] (0/7) Epoch 22, batch 2450, loss[loss=0.1627, simple_loss=0.2408, pruned_loss=0.04228, over 4863.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2375, pruned_loss=0.04741, over 955584.77 frames. ], batch size: 44, lr: 3.14e-03, grad_scale: 32.0
+2023-04-27 17:02:27,068 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.64 vs. limit=2.0
+2023-04-27 17:02:38,156 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0413, 0.7091, 0.8979, 0.7832, 1.1959, 0.9624, 0.8450, 0.9382],
+ device='cuda:0'), covar=tensor([0.1869, 0.1658, 0.2117, 0.1736, 0.1114, 0.1593, 0.1899, 0.2455],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0310, 0.0349, 0.0287, 0.0324, 0.0308, 0.0299, 0.0370],
+ device='cuda:0'), out_proj_covar=tensor([6.3771e-05, 6.4205e-05, 7.3435e-05, 5.7879e-05, 6.6751e-05, 6.4558e-05,
+ 6.2586e-05, 7.8590e-05], device='cuda:0')
+2023-04-27 17:02:42,828 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.578e+02 1.755e+02 2.241e+02 3.458e+02, threshold=3.510e+02, percent-clipped=0.0
+2023-04-27 17:02:48,955 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122773.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 17:02:54,345 INFO [finetune.py:976] (0/7) Epoch 22, batch 2500, loss[loss=0.2052, simple_loss=0.2706, pruned_loss=0.06995, over 4895.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2379, pruned_loss=0.04802, over 957240.54 frames. ], batch size: 32, lr: 3.14e-03, grad_scale: 32.0
+2023-04-27 17:03:21,720 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=122821.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 17:03:23,558 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=122824.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 17:03:28,387 INFO [finetune.py:976] (0/7) Epoch 22, batch 2550, loss[loss=0.162, simple_loss=0.2165, pruned_loss=0.0538, over 4060.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2404, pruned_loss=0.04865, over 956698.95 frames.
], batch size: 17, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:03:40,088 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122851.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:03:48,776 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.124e+02 1.572e+02 1.988e+02 2.306e+02 4.105e+02, threshold=3.976e+02, percent-clipped=1.0 +2023-04-27 17:03:48,862 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9859, 3.9967, 2.7813, 4.5565, 4.1030, 3.9095, 1.7256, 3.9237], + device='cuda:0'), covar=tensor([0.1693, 0.1137, 0.3449, 0.1586, 0.2249, 0.1745, 0.5838, 0.2262], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0217, 0.0252, 0.0306, 0.0295, 0.0248, 0.0277, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 17:03:48,981 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.62 vs. limit=5.0 +2023-04-27 17:03:54,594 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0029, 2.6502, 1.0027, 1.3783, 2.1371, 1.2172, 3.4038, 1.7348], + device='cuda:0'), covar=tensor([0.0734, 0.0647, 0.0889, 0.1304, 0.0509, 0.1026, 0.0235, 0.0641], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 17:04:00,766 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4370, 3.3952, 1.1975, 1.7786, 1.7284, 2.5260, 1.8318, 0.9932], + device='cuda:0'), covar=tensor([0.1421, 0.0863, 0.1741, 0.1253, 0.1153, 0.0961, 0.1573, 0.2067], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0241, 0.0139, 0.0120, 0.0133, 0.0153, 0.0118, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 17:04:01,377 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=122872.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:04:13,219 INFO [finetune.py:976] (0/7) Epoch 22, batch 2600, loss[loss=0.1395, simple_loss=0.2276, pruned_loss=0.02568, over 4889.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2418, pruned_loss=0.04878, over 956248.57 frames. ], batch size: 43, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:04:34,929 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=122899.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:05:19,790 INFO [finetune.py:976] (0/7) Epoch 22, batch 2650, loss[loss=0.1764, simple_loss=0.2615, pruned_loss=0.04568, over 4907.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2436, pruned_loss=0.04946, over 955539.21 frames. 
], batch size: 36, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:05:19,912 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7303, 1.8641, 0.8546, 1.4168, 1.9159, 1.5541, 1.4920, 1.5931], + device='cuda:0'), covar=tensor([0.0479, 0.0362, 0.0334, 0.0554, 0.0250, 0.0515, 0.0506, 0.0545], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052], + device='cuda:0') +2023-04-27 17:05:20,514 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=122933.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:06:00,532 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.091e+02 1.522e+02 1.838e+02 2.219e+02 4.134e+02, threshold=3.677e+02, percent-clipped=1.0 +2023-04-27 17:06:15,881 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=122975.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:06:26,130 INFO [finetune.py:976] (0/7) Epoch 22, batch 2700, loss[loss=0.1562, simple_loss=0.2267, pruned_loss=0.04286, over 4833.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2432, pruned_loss=0.04922, over 955536.24 frames. ], batch size: 47, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:06:46,814 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-27 17:06:56,055 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=123023.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:07:02,522 INFO [finetune.py:976] (0/7) Epoch 22, batch 2750, loss[loss=0.1555, simple_loss=0.2261, pruned_loss=0.04241, over 4930.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2415, pruned_loss=0.04878, over 956405.46 frames. ], batch size: 33, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:07:28,109 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.026e+01 1.558e+02 1.841e+02 2.200e+02 4.189e+02, threshold=3.681e+02, percent-clipped=2.0 +2023-04-27 17:07:41,096 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.89 vs. limit=5.0 +2023-04-27 17:07:53,156 INFO [finetune.py:976] (0/7) Epoch 22, batch 2800, loss[loss=0.2064, simple_loss=0.2718, pruned_loss=0.07049, over 4763.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2392, pruned_loss=0.04859, over 954480.89 frames. ], batch size: 26, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:08:48,190 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123124.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:08:53,535 INFO [finetune.py:976] (0/7) Epoch 22, batch 2850, loss[loss=0.2274, simple_loss=0.2875, pruned_loss=0.08364, over 4852.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2385, pruned_loss=0.04878, over 955754.50 frames. 
], batch size: 49, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:09:07,561 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7109, 1.9120, 1.0848, 1.4791, 2.1264, 1.5119, 1.5138, 1.6408], + device='cuda:0'), covar=tensor([0.0459, 0.0334, 0.0287, 0.0506, 0.0235, 0.0461, 0.0437, 0.0516], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 17:09:12,897 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.588e+02 1.846e+02 2.327e+02 3.843e+02, threshold=3.691e+02, percent-clipped=1.0 +2023-04-27 17:09:19,397 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=123172.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:09:27,500 INFO [finetune.py:976] (0/7) Epoch 22, batch 2900, loss[loss=0.1971, simple_loss=0.2447, pruned_loss=0.0747, over 4075.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2411, pruned_loss=0.04943, over 955487.94 frames. ], batch size: 17, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:09:35,587 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0 +2023-04-27 17:10:09,343 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123228.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:10:11,741 INFO [finetune.py:976] (0/7) Epoch 22, batch 2950, loss[loss=0.1519, simple_loss=0.2328, pruned_loss=0.03552, over 4861.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2451, pruned_loss=0.0507, over 955084.37 frames. ], batch size: 31, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:10:20,037 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0875, 2.4704, 1.0687, 1.3815, 2.1286, 1.2077, 3.2612, 1.7565], + device='cuda:0'), covar=tensor([0.0677, 0.0814, 0.0822, 0.1188, 0.0474, 0.1004, 0.0203, 0.0595], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0048, 0.0046, 0.0049, 0.0052, 0.0074, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 17:10:55,130 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.651e+02 2.036e+02 2.473e+02 7.289e+02, threshold=4.071e+02, percent-clipped=4.0 +2023-04-27 17:11:18,373 INFO [finetune.py:976] (0/7) Epoch 22, batch 3000, loss[loss=0.1795, simple_loss=0.2544, pruned_loss=0.05228, over 4914.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2451, pruned_loss=0.05098, over 955422.89 frames. ], batch size: 29, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:11:18,375 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 17:11:20,896 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.4016, 3.4161, 2.5625, 3.9020, 3.5420, 3.5154, 1.4591, 3.4523], + device='cuda:0'), covar=tensor([0.1966, 0.1408, 0.3496, 0.2172, 0.2325, 0.1680, 0.5598, 0.2426], + device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0217, 0.0251, 0.0306, 0.0296, 0.0246, 0.0277, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 17:11:29,117 INFO [finetune.py:1010] (0/7) Epoch 22, validation: loss=0.1537, simple_loss=0.2227, pruned_loss=0.04237, over 2265189.00 frames. 
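The per-batch summaries above follow a fixed format (Epoch N, batch M, then a loss[...] for the current batch and a running tot_loss[...]), so the convergence curve can be recovered from the log directly. A minimal reader-side sketch in Python; the names PATTERN and tot_losses are illustrative, not part of the training code:

import re

# Matches the per-batch training summaries logged by finetune.py:976, e.g.
# "Epoch 22, batch 3000, ... tot_loss[loss=0.1735, ...".
PATTERN = re.compile(
    r"Epoch (?P<epoch>\d+), batch (?P<batch>\d+), .*?"
    r"tot_loss\[loss=(?P<tot_loss>[\d.]+),"
)

def tot_losses(path):
    """Yield (epoch, batch, tot_loss) triples from a training log."""
    with open(path) as f:
        for line in f:
            m = PATTERN.search(line)
            if m:
                yield int(m["epoch"]), int(m["batch"]), float(m["tot_loss"])

Validation entries ("Epoch 22, validation: ...") carry no batch index and are skipped by the pattern; over this stretch of epoch 22 the extracted tot_loss stays in a narrow 0.166-0.176 band.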
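The [optim.py:369] entries are internally consistent in one respect worth noting: the five grad-norm numbers read as min / 25% / median / 75% / max, and the logged threshold always equals Clipping_scale times the median (2.0 * 1.828e+02 = 3.656e+02 in the first such entry of this section), with percent-clipped giving the share of recent batches above it. A hedged sketch of that bookkeeping, with illustrative names rather than icefall's actual optim.py code:

import torch

def clipping_stats(grad_norms: torch.Tensor, clipping_scale: float = 2.0):
    """grad_norms: 1-D tensor of recent per-batch gradient norms."""
    probs = torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0], dtype=grad_norms.dtype)
    quartiles = torch.quantile(grad_norms, probs)  # min/25%/median/75%/max
    threshold = clipping_scale * quartiles[2]      # 2.0 * median, as logged
    percent_clipped = 100.0 * (grad_norms > threshold).float().mean()
    return quartiles, threshold, percent_clipped

Read this way, the thresholds drifting between roughly 3.3e+02 and 4.1e+02 across the section track a slowly moving median gradient norm rather than a fixed clip value.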
+2023-04-27 17:11:29,117 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 17:11:39,690 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.29 vs. limit=5.0 +2023-04-27 17:12:11,355 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123314.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:12:33,517 INFO [finetune.py:976] (0/7) Epoch 22, batch 3050, loss[loss=0.2096, simple_loss=0.2689, pruned_loss=0.07509, over 4829.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2457, pruned_loss=0.05095, over 955794.32 frames. ], batch size: 47, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:13:01,591 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.350e+01 1.609e+02 1.855e+02 2.357e+02 3.763e+02, threshold=3.710e+02, percent-clipped=0.0 +2023-04-27 17:13:14,312 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123375.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:13:23,022 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-27 17:13:24,029 INFO [finetune.py:976] (0/7) Epoch 22, batch 3100, loss[loss=0.1596, simple_loss=0.2147, pruned_loss=0.0523, over 4170.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2438, pruned_loss=0.05017, over 955940.99 frames. ], batch size: 18, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:14:29,439 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123428.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:14:31,785 INFO [finetune.py:976] (0/7) Epoch 22, batch 3150, loss[loss=0.1266, simple_loss=0.2013, pruned_loss=0.02592, over 4786.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.241, pruned_loss=0.04891, over 956268.26 frames. ], batch size: 29, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:15:16,033 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.572e+02 1.779e+02 2.164e+02 3.617e+02, threshold=3.558e+02, percent-clipped=0.0 +2023-04-27 17:15:38,620 INFO [finetune.py:976] (0/7) Epoch 22, batch 3200, loss[loss=0.1718, simple_loss=0.2423, pruned_loss=0.0507, over 4791.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.238, pruned_loss=0.04775, over 953352.15 frames. ], batch size: 51, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:15:48,867 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123489.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:15:48,891 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7039, 2.1928, 1.7267, 1.5344, 1.3191, 1.2841, 1.7256, 1.2837], + device='cuda:0'), covar=tensor([0.1404, 0.1217, 0.1271, 0.1574, 0.2171, 0.1818, 0.0943, 0.1891], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0212, 0.0169, 0.0204, 0.0200, 0.0186, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 17:16:43,128 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123528.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:16:51,262 INFO [finetune.py:976] (0/7) Epoch 22, batch 3250, loss[loss=0.1949, simple_loss=0.2702, pruned_loss=0.05975, over 4845.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2379, pruned_loss=0.04814, over 950429.52 frames. 
], batch size: 49, lr: 3.14e-03, grad_scale: 64.0 +2023-04-27 17:17:26,394 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.204e+02 1.572e+02 1.866e+02 2.251e+02 4.300e+02, threshold=3.733e+02, percent-clipped=4.0 +2023-04-27 17:17:40,412 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=123576.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:17:44,044 INFO [finetune.py:976] (0/7) Epoch 22, batch 3300, loss[loss=0.2009, simple_loss=0.279, pruned_loss=0.0614, over 4925.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2418, pruned_loss=0.04962, over 950681.52 frames. ], batch size: 36, lr: 3.14e-03, grad_scale: 64.0 +2023-04-27 17:17:50,365 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-27 17:18:17,766 INFO [finetune.py:976] (0/7) Epoch 22, batch 3350, loss[loss=0.1666, simple_loss=0.2382, pruned_loss=0.04751, over 4779.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2444, pruned_loss=0.05022, over 950944.54 frames. ], batch size: 25, lr: 3.14e-03, grad_scale: 64.0 +2023-04-27 17:18:40,219 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.630e+02 1.835e+02 2.174e+02 3.522e+02, threshold=3.670e+02, percent-clipped=0.0 +2023-04-27 17:18:43,918 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123670.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:18:48,068 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1405, 1.3977, 1.3112, 1.6838, 1.5070, 1.5786, 1.3211, 2.4670], + device='cuda:0'), covar=tensor([0.0581, 0.0811, 0.0789, 0.1187, 0.0651, 0.0481, 0.0723, 0.0200], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 17:18:51,586 INFO [finetune.py:976] (0/7) Epoch 22, batch 3400, loss[loss=0.1599, simple_loss=0.2463, pruned_loss=0.03677, over 4812.00 frames. ], tot_loss[loss=0.174, simple_loss=0.2461, pruned_loss=0.05089, over 951465.89 frames. ], batch size: 38, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:18:59,617 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5962, 1.6307, 0.8136, 1.3036, 1.6364, 1.4418, 1.3882, 1.4437], + device='cuda:0'), covar=tensor([0.0470, 0.0358, 0.0342, 0.0535, 0.0283, 0.0483, 0.0493, 0.0513], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0028, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 17:19:25,131 INFO [finetune.py:976] (0/7) Epoch 22, batch 3450, loss[loss=0.1739, simple_loss=0.2362, pruned_loss=0.05587, over 4743.00 frames. ], tot_loss[loss=0.174, simple_loss=0.246, pruned_loss=0.05097, over 950717.74 frames. 
], batch size: 54, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:19:46,816 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.7402, 1.6702, 1.6420, 1.2611, 1.7838, 1.5249, 2.2404, 1.5143], + device='cuda:0'), covar=tensor([0.3812, 0.1918, 0.4701, 0.3029, 0.1713, 0.2200, 0.1538, 0.4307], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0346, 0.0423, 0.0352, 0.0380, 0.0374, 0.0368, 0.0415], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 17:19:57,042 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.610e+02 1.907e+02 2.404e+02 3.474e+02, threshold=3.815e+02, percent-clipped=0.0 +2023-04-27 17:20:20,641 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5611, 3.8110, 0.7652, 2.0243, 2.1014, 2.5537, 2.2702, 0.9812], + device='cuda:0'), covar=tensor([0.1380, 0.0769, 0.2177, 0.1220, 0.1014, 0.1137, 0.1457, 0.2133], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0241, 0.0138, 0.0120, 0.0133, 0.0153, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 17:20:21,192 INFO [finetune.py:976] (0/7) Epoch 22, batch 3500, loss[loss=0.1223, simple_loss=0.1995, pruned_loss=0.02253, over 4842.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2442, pruned_loss=0.05082, over 952725.37 frames. ], batch size: 49, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:20:28,342 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=123784.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:21:02,321 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-27 17:21:22,864 INFO [finetune.py:976] (0/7) Epoch 22, batch 3550, loss[loss=0.1956, simple_loss=0.2497, pruned_loss=0.0708, over 4826.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2415, pruned_loss=0.04985, over 954070.57 frames. ], batch size: 33, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:21:27,115 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0012, 2.7630, 2.0014, 2.0391, 1.3974, 1.4750, 2.0793, 1.4046], + device='cuda:0'), covar=tensor([0.1854, 0.1303, 0.1477, 0.1654, 0.2501, 0.2168, 0.0990, 0.2168], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0211, 0.0168, 0.0204, 0.0199, 0.0185, 0.0155, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 17:21:43,412 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.018e+02 1.536e+02 1.822e+02 2.195e+02 3.918e+02, threshold=3.645e+02, percent-clipped=1.0 +2023-04-27 17:21:49,387 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=123872.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:21:49,506 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 17:21:56,774 INFO [finetune.py:976] (0/7) Epoch 22, batch 3600, loss[loss=0.1574, simple_loss=0.2327, pruned_loss=0.04105, over 4801.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2398, pruned_loss=0.0493, over 956624.65 frames. ], batch size: 51, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:22:30,655 INFO [finetune.py:976] (0/7) Epoch 22, batch 3650, loss[loss=0.1639, simple_loss=0.251, pruned_loss=0.03838, over 4841.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2413, pruned_loss=0.04954, over 956651.66 frames. 
], batch size: 44, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:22:31,432 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=123933.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:23:14,431 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.652e+02 1.968e+02 2.235e+02 6.540e+02, threshold=3.937e+02, percent-clipped=5.0 +2023-04-27 17:23:23,357 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=123970.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:23:37,656 INFO [finetune.py:976] (0/7) Epoch 22, batch 3700, loss[loss=0.1765, simple_loss=0.2595, pruned_loss=0.0468, over 4913.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2459, pruned_loss=0.05093, over 955856.71 frames. ], batch size: 37, lr: 3.14e-03, grad_scale: 32.0 +2023-04-27 17:23:39,013 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0866, 1.7438, 2.2671, 2.5367, 2.0897, 2.0558, 2.1853, 2.0697], + device='cuda:0'), covar=tensor([0.4440, 0.7260, 0.6859, 0.5212, 0.6168, 0.8196, 0.8385, 0.9688], + device='cuda:0'), in_proj_covar=tensor([0.0430, 0.0413, 0.0509, 0.0505, 0.0460, 0.0489, 0.0496, 0.0506], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 17:24:00,393 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-124000.pt +2023-04-27 17:24:07,704 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5618, 2.0927, 1.6221, 1.5594, 1.1730, 1.1760, 1.6656, 1.1722], + device='cuda:0'), covar=tensor([0.1603, 0.1296, 0.1419, 0.1662, 0.2347, 0.1917, 0.0960, 0.2089], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0212, 0.0170, 0.0206, 0.0201, 0.0187, 0.0156, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 17:24:22,552 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=124018.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:24:44,187 INFO [finetune.py:976] (0/7) Epoch 22, batch 3750, loss[loss=0.1518, simple_loss=0.2291, pruned_loss=0.03721, over 4855.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2474, pruned_loss=0.05165, over 954267.17 frames. ], batch size: 44, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:24:52,250 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124036.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:25:08,747 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124062.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:25:09,868 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.228e+02 1.543e+02 1.791e+02 2.127e+02 4.626e+02, threshold=3.581e+02, percent-clipped=2.0 +2023-04-27 17:25:22,232 INFO [finetune.py:976] (0/7) Epoch 22, batch 3800, loss[loss=0.164, simple_loss=0.243, pruned_loss=0.04244, over 4831.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2476, pruned_loss=0.05164, over 954107.69 frames. 
], batch size: 30, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:25:23,550 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124084.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:25:32,953 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124097.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:25:49,428 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124123.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:25:56,294 INFO [finetune.py:976] (0/7) Epoch 22, batch 3850, loss[loss=0.1535, simple_loss=0.2282, pruned_loss=0.03939, over 4755.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2458, pruned_loss=0.05039, over 951708.42 frames. ], batch size: 28, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:25:56,357 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=124132.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:26:06,452 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4093, 1.0268, 0.3484, 1.1829, 1.0482, 1.3058, 1.2400, 1.2549], + device='cuda:0'), covar=tensor([0.0425, 0.0331, 0.0383, 0.0504, 0.0277, 0.0443, 0.0420, 0.0481], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 17:26:17,750 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.596e+02 1.902e+02 2.251e+02 4.955e+02, threshold=3.804e+02, percent-clipped=1.0 +2023-04-27 17:26:20,306 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0769, 2.3189, 2.0882, 2.3688, 1.9196, 2.1464, 2.0124, 1.6742], + device='cuda:0'), covar=tensor([0.1420, 0.1109, 0.0775, 0.0981, 0.2658, 0.1046, 0.1568, 0.1905], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0302, 0.0218, 0.0278, 0.0315, 0.0256, 0.0249, 0.0263], + device='cuda:0'), out_proj_covar=tensor([1.1413e-04, 1.1937e-04, 8.5972e-05, 1.0984e-04, 1.2755e-04, 1.0136e-04, + 1.0069e-04, 1.0384e-04], device='cuda:0') +2023-04-27 17:26:21,481 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2365, 1.5136, 1.4749, 1.7781, 1.7175, 1.8361, 1.3816, 3.3615], + device='cuda:0'), covar=tensor([0.0607, 0.0751, 0.0731, 0.1113, 0.0579, 0.0484, 0.0702, 0.0143], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 17:26:35,209 INFO [finetune.py:976] (0/7) Epoch 22, batch 3900, loss[loss=0.1566, simple_loss=0.2318, pruned_loss=0.04068, over 4822.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2434, pruned_loss=0.04967, over 950728.04 frames. 
], batch size: 41, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:27:29,093 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8905, 1.8298, 4.4057, 4.1559, 3.9233, 4.1692, 4.1351, 3.9134], + device='cuda:0'), covar=tensor([0.6370, 0.4895, 0.0945, 0.1552, 0.1013, 0.1338, 0.1171, 0.1377], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0304, 0.0403, 0.0405, 0.0345, 0.0407, 0.0310, 0.0362], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 17:27:38,099 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124228.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:27:40,960 INFO [finetune.py:976] (0/7) Epoch 22, batch 3950, loss[loss=0.15, simple_loss=0.2091, pruned_loss=0.04545, over 4394.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2399, pruned_loss=0.04848, over 951506.90 frames. ], batch size: 19, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:28:07,729 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.810e+01 1.371e+02 1.651e+02 2.165e+02 4.664e+02, threshold=3.302e+02, percent-clipped=1.0 +2023-04-27 17:28:12,585 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4432, 1.0561, 0.3658, 1.1834, 1.0669, 1.3271, 1.2204, 1.2542], + device='cuda:0'), covar=tensor([0.0515, 0.0413, 0.0421, 0.0578, 0.0316, 0.0486, 0.0496, 0.0556], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 17:28:13,183 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124272.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:28:15,617 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2833, 1.6068, 1.6339, 1.9537, 1.8735, 2.0659, 1.6016, 4.2904], + device='cuda:0'), covar=tensor([0.0542, 0.0800, 0.0778, 0.1167, 0.0626, 0.0527, 0.0742, 0.0104], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 17:28:19,121 INFO [finetune.py:976] (0/7) Epoch 22, batch 4000, loss[loss=0.169, simple_loss=0.234, pruned_loss=0.05203, over 4763.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2395, pruned_loss=0.04931, over 952305.36 frames. ], batch size: 27, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:28:58,315 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.84 vs. limit=2.0 +2023-04-27 17:29:09,166 INFO [finetune.py:976] (0/7) Epoch 22, batch 4050, loss[loss=0.2485, simple_loss=0.3091, pruned_loss=0.09392, over 4812.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.2435, pruned_loss=0.05084, over 951986.14 frames. 
], batch size: 30, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:29:09,904 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124333.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:29:33,232 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124348.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 17:29:54,265 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.048e+02 1.579e+02 1.902e+02 2.335e+02 4.180e+02, threshold=3.804e+02, percent-clipped=3.0 +2023-04-27 17:30:17,167 INFO [finetune.py:976] (0/7) Epoch 22, batch 4100, loss[loss=0.196, simple_loss=0.2679, pruned_loss=0.06205, over 4924.00 frames. ], tot_loss[loss=0.1749, simple_loss=0.2469, pruned_loss=0.05146, over 953847.41 frames. ], batch size: 33, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:30:27,907 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4501, 1.1050, 0.5183, 1.1880, 1.1176, 1.3419, 1.2625, 1.2388], + device='cuda:0'), covar=tensor([0.0494, 0.0372, 0.0403, 0.0526, 0.0311, 0.0452, 0.0456, 0.0525], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 17:30:34,103 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124392.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:30:34,277 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 17:30:58,096 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124409.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 17:31:08,994 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124418.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:31:27,631 INFO [finetune.py:976] (0/7) Epoch 22, batch 4150, loss[loss=0.1477, simple_loss=0.2187, pruned_loss=0.03837, over 4852.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2481, pruned_loss=0.05169, over 954876.32 frames. ], batch size: 31, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:31:27,737 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124432.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:32:10,278 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.606e+01 1.564e+02 1.862e+02 2.379e+02 7.363e+02, threshold=3.724e+02, percent-clipped=1.0 +2023-04-27 17:32:33,360 INFO [finetune.py:976] (0/7) Epoch 22, batch 4200, loss[loss=0.1797, simple_loss=0.2407, pruned_loss=0.05936, over 4764.00 frames. ], tot_loss[loss=0.1757, simple_loss=0.2486, pruned_loss=0.05144, over 954950.58 frames. ], batch size: 26, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:32:45,168 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124493.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:33:09,252 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124528.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:33:11,581 INFO [finetune.py:976] (0/7) Epoch 22, batch 4250, loss[loss=0.1544, simple_loss=0.2273, pruned_loss=0.04068, over 4836.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2449, pruned_loss=0.04969, over 956726.77 frames. 
], batch size: 47, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:33:33,183 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.562e+02 1.869e+02 2.245e+02 4.302e+02, threshold=3.738e+02, percent-clipped=2.0 +2023-04-27 17:33:40,655 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=124576.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:33:44,726 INFO [finetune.py:976] (0/7) Epoch 22, batch 4300, loss[loss=0.2362, simple_loss=0.2982, pruned_loss=0.08713, over 4423.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2432, pruned_loss=0.04935, over 956842.20 frames. ], batch size: 19, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:33:53,978 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8609, 2.2377, 1.0477, 1.3312, 1.7212, 1.2196, 2.3178, 1.3418], + device='cuda:0'), covar=tensor([0.0658, 0.0645, 0.0604, 0.1012, 0.0373, 0.0858, 0.0295, 0.0606], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 17:34:15,439 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124628.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:34:17,826 INFO [finetune.py:976] (0/7) Epoch 22, batch 4350, loss[loss=0.1772, simple_loss=0.2387, pruned_loss=0.05785, over 4833.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2396, pruned_loss=0.04787, over 957012.85 frames. ], batch size: 30, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:34:22,037 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124638.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:34:38,885 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124663.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:34:39,364 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.058e+02 1.483e+02 1.838e+02 2.190e+02 4.498e+02, threshold=3.677e+02, percent-clipped=2.0 +2023-04-27 17:34:51,254 INFO [finetune.py:976] (0/7) Epoch 22, batch 4400, loss[loss=0.2249, simple_loss=0.2891, pruned_loss=0.08032, over 4823.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2409, pruned_loss=0.04896, over 956088.66 frames. ], batch size: 39, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:34:58,038 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124692.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:35:02,324 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124699.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:35:11,386 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124704.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 17:35:32,548 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124718.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:35:42,955 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124724.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:35:53,108 INFO [finetune.py:976] (0/7) Epoch 22, batch 4450, loss[loss=0.145, simple_loss=0.226, pruned_loss=0.03204, over 4762.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2437, pruned_loss=0.04978, over 955032.42 frames. 
], batch size: 27, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:36:03,322 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7840, 0.8746, 1.6170, 2.0563, 1.8341, 1.6304, 1.6529, 1.6297], + device='cuda:0'), covar=tensor([0.4434, 0.6311, 0.5766, 0.5925, 0.5889, 0.7381, 0.7315, 0.8005], + device='cuda:0'), in_proj_covar=tensor([0.0434, 0.0417, 0.0512, 0.0509, 0.0464, 0.0493, 0.0500, 0.0510], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 17:36:03,819 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=124740.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:36:30,398 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.087e+02 1.602e+02 1.930e+02 2.289e+02 4.578e+02, threshold=3.860e+02, percent-clipped=3.0 +2023-04-27 17:36:38,218 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=124766.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:37:00,153 INFO [finetune.py:976] (0/7) Epoch 22, batch 4500, loss[loss=0.1653, simple_loss=0.2382, pruned_loss=0.04623, over 4773.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2445, pruned_loss=0.04974, over 954862.69 frames. ], batch size: 26, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:37:03,923 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124788.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:37:11,558 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-04-27 17:37:47,352 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=124820.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:38:06,670 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4533, 1.2845, 4.1401, 3.8516, 3.5757, 3.9736, 3.9581, 3.5267], + device='cuda:0'), covar=tensor([0.7227, 0.5981, 0.1067, 0.1864, 0.1246, 0.1314, 0.1488, 0.1738], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0305, 0.0406, 0.0408, 0.0348, 0.0410, 0.0313, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 17:38:07,812 INFO [finetune.py:976] (0/7) Epoch 22, batch 4550, loss[loss=0.1654, simple_loss=0.2456, pruned_loss=0.04263, over 4876.00 frames. ], tot_loss[loss=0.1741, simple_loss=0.247, pruned_loss=0.05065, over 955354.35 frames. ], batch size: 35, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:38:49,605 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.528e+02 1.862e+02 2.235e+02 3.504e+02, threshold=3.725e+02, percent-clipped=0.0 +2023-04-27 17:39:13,979 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=124881.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:39:14,477 INFO [finetune.py:976] (0/7) Epoch 22, batch 4600, loss[loss=0.1374, simple_loss=0.2124, pruned_loss=0.0312, over 4742.00 frames. ], tot_loss[loss=0.1747, simple_loss=0.2473, pruned_loss=0.05108, over 955108.14 frames. 
], batch size: 54, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:39:58,744 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2468, 1.4251, 1.8003, 1.9040, 1.7744, 1.8870, 1.8335, 1.8155], + device='cuda:0'), covar=tensor([0.3478, 0.4891, 0.3775, 0.4261, 0.5106, 0.6484, 0.4630, 0.4628], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0373, 0.0324, 0.0339, 0.0348, 0.0393, 0.0357, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 17:40:00,422 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=124928.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:40:03,269 INFO [finetune.py:976] (0/7) Epoch 22, batch 4650, loss[loss=0.1485, simple_loss=0.2192, pruned_loss=0.03892, over 4908.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2443, pruned_loss=0.05029, over 953820.94 frames. ], batch size: 36, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:40:23,403 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.522e+01 1.501e+02 1.822e+02 2.156e+02 6.565e+02, threshold=3.645e+02, percent-clipped=1.0 +2023-04-27 17:40:31,668 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=124976.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:40:35,247 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8055, 2.1878, 2.1410, 2.3173, 2.0124, 2.1094, 2.1426, 2.1457], + device='cuda:0'), covar=tensor([0.4458, 0.5891, 0.5030, 0.4456, 0.5958, 0.7098, 0.6345, 0.5396], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0374, 0.0324, 0.0340, 0.0348, 0.0394, 0.0358, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 17:40:35,286 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.13 vs. limit=5.0 +2023-04-27 17:40:36,853 INFO [finetune.py:976] (0/7) Epoch 22, batch 4700, loss[loss=0.1461, simple_loss=0.2214, pruned_loss=0.0354, over 4906.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2404, pruned_loss=0.04872, over 955108.24 frames. ], batch size: 35, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:40:44,819 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=124994.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:40:51,156 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125004.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 17:41:00,868 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125019.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:41:10,028 INFO [finetune.py:976] (0/7) Epoch 22, batch 4750, loss[loss=0.2113, simple_loss=0.2747, pruned_loss=0.07393, over 4124.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2393, pruned_loss=0.04845, over 955884.98 frames. ], batch size: 65, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:41:23,893 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=125052.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 17:41:31,621 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.684e+01 1.607e+02 1.875e+02 2.293e+02 4.178e+02, threshold=3.749e+02, percent-clipped=1.0 +2023-04-27 17:41:43,188 INFO [finetune.py:976] (0/7) Epoch 22, batch 4800, loss[loss=0.1848, simple_loss=0.263, pruned_loss=0.05327, over 4924.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.241, pruned_loss=0.04923, over 956100.01 frames. 
], batch size: 38, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:41:48,371 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125088.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:42:14,784 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125128.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:42:17,146 INFO [finetune.py:976] (0/7) Epoch 22, batch 4850, loss[loss=0.1543, simple_loss=0.2312, pruned_loss=0.03872, over 4811.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2434, pruned_loss=0.04953, over 953936.82 frames. ], batch size: 41, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:42:20,142 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=125136.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:42:44,288 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.525e+02 1.734e+02 2.236e+02 4.357e+02, threshold=3.469e+02, percent-clipped=1.0 +2023-04-27 17:42:50,809 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9671, 1.9837, 1.8056, 1.6558, 1.9760, 1.7135, 2.5960, 1.4983], + device='cuda:0'), covar=tensor([0.3438, 0.1778, 0.4227, 0.2750, 0.1627, 0.2433, 0.1439, 0.4780], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0347, 0.0423, 0.0352, 0.0378, 0.0371, 0.0367, 0.0416], + device='cuda:0'), out_proj_covar=tensor([9.9711e-05, 1.0380e-04, 1.2830e-04, 1.0588e-04, 1.1259e-04, 1.1064e-04, + 1.0783e-04, 1.2532e-04], device='cuda:0') +2023-04-27 17:43:03,300 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125176.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:43:13,296 INFO [finetune.py:976] (0/7) Epoch 22, batch 4900, loss[loss=0.1639, simple_loss=0.2358, pruned_loss=0.04597, over 4772.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2454, pruned_loss=0.05026, over 955159.72 frames. ], batch size: 28, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:43:17,139 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1339, 2.1333, 1.8290, 1.7742, 2.2830, 1.8927, 2.7941, 1.6227], + device='cuda:0'), covar=tensor([0.3485, 0.1802, 0.4671, 0.2985, 0.1564, 0.2341, 0.1225, 0.4336], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0346, 0.0423, 0.0351, 0.0378, 0.0371, 0.0366, 0.0416], + device='cuda:0'), out_proj_covar=tensor([9.9640e-05, 1.0364e-04, 1.2823e-04, 1.0574e-04, 1.1245e-04, 1.1063e-04, + 1.0767e-04, 1.2523e-04], device='cuda:0') +2023-04-27 17:43:18,225 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125189.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:44:14,015 INFO [finetune.py:976] (0/7) Epoch 22, batch 4950, loss[loss=0.1742, simple_loss=0.2545, pruned_loss=0.04699, over 4807.00 frames. ], tot_loss[loss=0.1737, simple_loss=0.2467, pruned_loss=0.0504, over 954556.19 frames. ], batch size: 39, lr: 3.13e-03, grad_scale: 32.0 +2023-04-27 17:44:50,833 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.480e+02 1.752e+02 2.103e+02 4.760e+02, threshold=3.505e+02, percent-clipped=3.0 +2023-04-27 17:45:13,456 INFO [finetune.py:976] (0/7) Epoch 22, batch 5000, loss[loss=0.1538, simple_loss=0.2273, pruned_loss=0.0402, over 4816.00 frames. ], tot_loss[loss=0.172, simple_loss=0.245, pruned_loss=0.04947, over 954700.89 frames. 
], batch size: 41, lr: 3.13e-03, grad_scale: 16.0 +2023-04-27 17:45:32,794 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125294.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:46:05,807 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125319.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:46:18,221 INFO [finetune.py:976] (0/7) Epoch 22, batch 5050, loss[loss=0.1777, simple_loss=0.2414, pruned_loss=0.05703, over 4742.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.242, pruned_loss=0.0485, over 952990.54 frames. ], batch size: 54, lr: 3.13e-03, grad_scale: 16.0 +2023-04-27 17:46:22,003 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8653, 1.4276, 1.8496, 2.3524, 1.9721, 1.7481, 1.8176, 1.8077], + device='cuda:0'), covar=tensor([0.4274, 0.6736, 0.5778, 0.5294, 0.5524, 0.7484, 0.8187, 0.8549], + device='cuda:0'), in_proj_covar=tensor([0.0431, 0.0416, 0.0510, 0.0504, 0.0462, 0.0492, 0.0497, 0.0510], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 17:46:32,846 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=125342.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:46:43,439 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125356.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:46:47,131 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1844, 1.6370, 1.9858, 2.4631, 2.0477, 1.6575, 1.4085, 1.8281], + device='cuda:0'), covar=tensor([0.2627, 0.2952, 0.1480, 0.1685, 0.2350, 0.2346, 0.3918, 0.1865], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0244, 0.0226, 0.0314, 0.0220, 0.0233, 0.0227, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 17:46:47,737 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2867, 1.7320, 2.1783, 2.7315, 2.1801, 1.7658, 1.5612, 1.9648], + device='cuda:0'), covar=tensor([0.3346, 0.3176, 0.1640, 0.2175, 0.2688, 0.2671, 0.3841, 0.2023], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0244, 0.0226, 0.0314, 0.0220, 0.0233, 0.0227, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 17:46:48,792 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.666e+02 1.966e+02 2.426e+02 4.294e+02, threshold=3.932e+02, percent-clipped=5.0 +2023-04-27 17:46:50,095 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=125367.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:46:55,690 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125376.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:46:59,780 INFO [finetune.py:976] (0/7) Epoch 22, batch 5100, loss[loss=0.1701, simple_loss=0.2435, pruned_loss=0.04834, over 4784.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2392, pruned_loss=0.04749, over 954206.47 frames. 
], batch size: 29, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:47:24,379 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125417.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 17:47:26,779 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125421.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:47:33,408 INFO [finetune.py:976] (0/7) Epoch 22, batch 5150, loss[loss=0.2356, simple_loss=0.2907, pruned_loss=0.09024, over 4831.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2406, pruned_loss=0.0487, over 953294.24 frames. ], batch size: 33, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:47:37,063 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125437.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:47:47,834 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.99 vs. limit=5.0 +2023-04-27 17:48:02,908 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.720e+01 1.588e+02 1.846e+02 2.174e+02 3.765e+02, threshold=3.692e+02, percent-clipped=0.0 +2023-04-27 17:48:09,681 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125476.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:48:13,157 INFO [finetune.py:976] (0/7) Epoch 22, batch 5200, loss[loss=0.1413, simple_loss=0.2285, pruned_loss=0.02702, over 4895.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2444, pruned_loss=0.04966, over 955443.90 frames. ], batch size: 43, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:48:13,297 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125482.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:48:14,967 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125484.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:48:42,433 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=125524.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:48:47,218 INFO [finetune.py:976] (0/7) Epoch 22, batch 5250, loss[loss=0.1527, simple_loss=0.2168, pruned_loss=0.04435, over 4773.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2463, pruned_loss=0.05008, over 957302.33 frames. ], batch size: 26, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:48:53,970 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-27 17:49:09,788 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.600e+02 1.860e+02 2.252e+02 4.468e+02, threshold=3.721e+02, percent-clipped=2.0 +2023-04-27 17:49:20,594 INFO [finetune.py:976] (0/7) Epoch 22, batch 5300, loss[loss=0.2191, simple_loss=0.2796, pruned_loss=0.0793, over 4139.00 frames. ], tot_loss[loss=0.1745, simple_loss=0.2477, pruned_loss=0.05065, over 955547.07 frames. 
], batch size: 65, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:49:24,345 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7596, 1.3021, 1.9112, 2.1858, 1.8711, 1.7711, 1.8132, 1.7988], + device='cuda:0'), covar=tensor([0.4527, 0.6803, 0.6187, 0.6207, 0.5773, 0.7721, 0.7483, 0.8687], + device='cuda:0'), in_proj_covar=tensor([0.0434, 0.0417, 0.0511, 0.0508, 0.0464, 0.0493, 0.0499, 0.0511], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 17:49:34,599 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125603.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:49:54,072 INFO [finetune.py:976] (0/7) Epoch 22, batch 5350, loss[loss=0.1962, simple_loss=0.273, pruned_loss=0.05973, over 4714.00 frames. ], tot_loss[loss=0.1746, simple_loss=0.2481, pruned_loss=0.05054, over 955453.76 frames. ], batch size: 59, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:49:56,035 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1473, 2.7819, 2.1467, 2.2732, 1.5767, 1.5626, 2.3999, 1.5369], + device='cuda:0'), covar=tensor([0.1534, 0.1423, 0.1399, 0.1646, 0.2238, 0.1851, 0.0879, 0.1993], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0210, 0.0168, 0.0203, 0.0199, 0.0184, 0.0154, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 17:50:15,153 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125664.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:50:16,103 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.320e+01 1.581e+02 1.868e+02 2.376e+02 4.466e+02, threshold=3.736e+02, percent-clipped=2.0 +2023-04-27 17:50:38,573 INFO [finetune.py:976] (0/7) Epoch 22, batch 5400, loss[loss=0.189, simple_loss=0.2309, pruned_loss=0.07359, over 4059.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2455, pruned_loss=0.05041, over 953678.00 frames. ], batch size: 17, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:51:19,935 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125712.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 17:51:40,595 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125725.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:51:45,247 INFO [finetune.py:976] (0/7) Epoch 22, batch 5450, loss[loss=0.1502, simple_loss=0.2128, pruned_loss=0.04377, over 4768.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2422, pruned_loss=0.04957, over 952909.18 frames. ], batch size: 28, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:51:45,328 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125732.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:52:28,294 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.173e+01 1.432e+02 1.653e+02 1.978e+02 3.483e+02, threshold=3.306e+02, percent-clipped=0.0 +2023-04-27 17:52:47,958 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125777.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:52:51,412 INFO [finetune.py:976] (0/7) Epoch 22, batch 5500, loss[loss=0.1636, simple_loss=0.237, pruned_loss=0.04507, over 4860.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2375, pruned_loss=0.04772, over 952769.41 frames. 
], batch size: 44, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:52:58,406 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=125784.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:52:59,633 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=125786.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:53:57,325 INFO [finetune.py:976] (0/7) Epoch 22, batch 5550, loss[loss=0.1567, simple_loss=0.2357, pruned_loss=0.03889, over 4757.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2397, pruned_loss=0.04814, over 954011.34 frames. ], batch size: 28, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:54:03,350 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=125832.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:54:41,115 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.577e+02 1.939e+02 2.274e+02 3.783e+02, threshold=3.878e+02, percent-clipped=3.0 +2023-04-27 17:55:02,079 INFO [finetune.py:976] (0/7) Epoch 22, batch 5600, loss[loss=0.1462, simple_loss=0.2315, pruned_loss=0.03051, over 4892.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2435, pruned_loss=0.04922, over 954185.09 frames. ], batch size: 32, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:55:12,358 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5637, 1.3793, 1.2264, 1.4417, 1.8000, 1.4657, 1.2449, 1.1727], + device='cuda:0'), covar=tensor([0.1422, 0.1186, 0.1544, 0.1172, 0.0647, 0.1366, 0.1970, 0.2112], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0312, 0.0352, 0.0288, 0.0326, 0.0308, 0.0301, 0.0373], + device='cuda:0'), out_proj_covar=tensor([6.4329e-05, 6.4466e-05, 7.4264e-05, 5.7936e-05, 6.7201e-05, 6.4641e-05, + 6.2767e-05, 7.9178e-05], device='cuda:0') +2023-04-27 17:56:05,133 INFO [finetune.py:976] (0/7) Epoch 22, batch 5650, loss[loss=0.1731, simple_loss=0.2459, pruned_loss=0.05012, over 4829.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2467, pruned_loss=0.04963, over 953043.60 frames. ], batch size: 39, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:56:31,901 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=125959.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:56:35,408 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.900e+01 1.470e+02 1.804e+02 2.167e+02 3.376e+02, threshold=3.608e+02, percent-clipped=0.0 +2023-04-27 17:56:45,405 INFO [finetune.py:976] (0/7) Epoch 22, batch 5700, loss[loss=0.1687, simple_loss=0.2208, pruned_loss=0.05836, over 3978.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.242, pruned_loss=0.0488, over 934120.91 frames. ], batch size: 17, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:56:54,939 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=125998.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:56:56,215 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-126000.pt +2023-04-27 17:57:08,644 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-22.pt +2023-04-27 17:57:25,281 INFO [finetune.py:976] (0/7) Epoch 23, batch 0, loss[loss=0.1362, simple_loss=0.208, pruned_loss=0.03218, over 4328.00 frames. ], tot_loss[loss=0.1362, simple_loss=0.208, pruned_loss=0.03218, over 4328.00 frames. 
], batch size: 19, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:57:25,283 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 17:57:31,346 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5329, 2.9748, 0.9783, 1.8014, 1.8621, 2.1932, 1.8462, 1.0515], + device='cuda:0'), covar=tensor([0.1308, 0.0966, 0.1905, 0.1151, 0.1049, 0.0914, 0.1497, 0.1747], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0240, 0.0138, 0.0120, 0.0132, 0.0151, 0.0118, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 17:57:32,370 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8438, 1.6750, 1.8821, 2.2293, 2.1918, 1.8045, 1.4791, 2.0161], + device='cuda:0'), covar=tensor([0.0765, 0.1176, 0.0742, 0.0532, 0.0594, 0.0808, 0.0751, 0.0568], + device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0202, 0.0184, 0.0175, 0.0177, 0.0181, 0.0152, 0.0178], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 17:57:41,121 INFO [finetune.py:1010] (0/7) Epoch 23, validation: loss=0.1552, simple_loss=0.2246, pruned_loss=0.04292, over 2265189.00 frames. +2023-04-27 17:57:41,122 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 17:57:48,904 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126012.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 17:58:05,555 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126032.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:58:13,322 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2740, 1.7834, 2.1651, 2.6726, 2.1740, 1.7368, 1.6412, 2.1083], + device='cuda:0'), covar=tensor([0.2970, 0.2988, 0.1559, 0.2141, 0.2590, 0.2548, 0.3708, 0.1833], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0244, 0.0226, 0.0313, 0.0220, 0.0233, 0.0227, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 17:58:23,638 INFO [finetune.py:976] (0/7) Epoch 23, batch 50, loss[loss=0.1495, simple_loss=0.2297, pruned_loss=0.03463, over 4895.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2457, pruned_loss=0.04958, over 216368.98 frames. 
], batch size: 43, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:58:23,758 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126059.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:58:24,776 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=126060.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:58:28,244 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.078e+02 1.502e+02 1.744e+02 2.087e+02 3.495e+02, threshold=3.488e+02, percent-clipped=0.0 +2023-04-27 17:58:41,184 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126077.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:58:42,973 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=126080.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:58:43,624 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126081.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:58:53,758 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1662, 1.7964, 2.0599, 2.3696, 2.3271, 1.8724, 1.8294, 2.1066], + device='cuda:0'), covar=tensor([0.0728, 0.0954, 0.0620, 0.0555, 0.0632, 0.0801, 0.0673, 0.0520], + device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0200, 0.0182, 0.0174, 0.0176, 0.0179, 0.0151, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 17:59:21,045 INFO [finetune.py:976] (0/7) Epoch 23, batch 100, loss[loss=0.1568, simple_loss=0.2246, pruned_loss=0.04451, over 4811.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2394, pruned_loss=0.04787, over 381410.10 frames. ], batch size: 25, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 17:59:42,924 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=126125.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 17:59:53,230 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6045, 0.6993, 1.5647, 2.0181, 1.7118, 1.5542, 1.5798, 1.5668], + device='cuda:0'), covar=tensor([0.3853, 0.5432, 0.5007, 0.4874, 0.4782, 0.6202, 0.6352, 0.6921], + device='cuda:0'), in_proj_covar=tensor([0.0433, 0.0416, 0.0508, 0.0507, 0.0463, 0.0492, 0.0499, 0.0508], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 18:00:14,304 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9001, 1.2122, 3.2685, 3.0320, 2.9254, 3.1884, 3.1606, 2.8806], + device='cuda:0'), covar=tensor([0.7184, 0.5570, 0.1512, 0.2099, 0.1386, 0.1848, 0.2008, 0.1700], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0305, 0.0404, 0.0406, 0.0347, 0.0407, 0.0313, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 18:00:28,538 INFO [finetune.py:976] (0/7) Epoch 23, batch 150, loss[loss=0.1496, simple_loss=0.2218, pruned_loss=0.03874, over 4874.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2365, pruned_loss=0.04766, over 511207.77 frames. 
], batch size: 34, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 18:00:38,440 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.878e+01 1.494e+02 1.914e+02 2.300e+02 4.167e+02, threshold=3.828e+02, percent-clipped=5.0 +2023-04-27 18:01:10,262 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6234, 1.8571, 1.8756, 2.1953, 2.2199, 2.3679, 1.8851, 4.6593], + device='cuda:0'), covar=tensor([0.0494, 0.0713, 0.0715, 0.1116, 0.0541, 0.0451, 0.0646, 0.0101], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0037, 0.0037, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 18:01:15,045 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6727, 2.5413, 1.9771, 2.1828, 2.4894, 2.1311, 3.2017, 1.8391], + device='cuda:0'), covar=tensor([0.3299, 0.2148, 0.4451, 0.3257, 0.1890, 0.2433, 0.1642, 0.4271], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0348, 0.0424, 0.0351, 0.0378, 0.0372, 0.0367, 0.0419], + device='cuda:0'), out_proj_covar=tensor([9.9704e-05, 1.0424e-04, 1.2856e-04, 1.0561e-04, 1.1260e-04, 1.1091e-04, + 1.0804e-04, 1.2615e-04], device='cuda:0') +2023-04-27 18:01:33,991 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.25 vs. limit=5.0 +2023-04-27 18:01:34,971 INFO [finetune.py:976] (0/7) Epoch 23, batch 200, loss[loss=0.1658, simple_loss=0.2332, pruned_loss=0.04925, over 4156.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2352, pruned_loss=0.04764, over 606821.93 frames. ], batch size: 65, lr: 3.12e-03, grad_scale: 16.0 +2023-04-27 18:02:08,759 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9931, 2.2769, 2.0009, 2.3142, 1.6370, 2.0170, 2.0577, 1.5641], + device='cuda:0'), covar=tensor([0.2041, 0.1241, 0.0852, 0.1165, 0.3371, 0.1158, 0.1910, 0.2604], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0301, 0.0217, 0.0278, 0.0315, 0.0256, 0.0250, 0.0263], + device='cuda:0'), out_proj_covar=tensor([1.1406e-04, 1.1916e-04, 8.5529e-05, 1.0963e-04, 1.2754e-04, 1.0138e-04, + 1.0075e-04, 1.0380e-04], device='cuda:0') +2023-04-27 18:02:19,986 INFO [finetune.py:976] (0/7) Epoch 23, batch 250, loss[loss=0.1614, simple_loss=0.24, pruned_loss=0.04141, over 4816.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2408, pruned_loss=0.04906, over 684756.09 frames. 
], batch size: 33, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:02:20,606 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126259.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:02:24,185 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.438e+01 1.622e+02 1.975e+02 2.331e+02 7.246e+02, threshold=3.950e+02, percent-clipped=3.0 +2023-04-27 18:02:49,932 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8919, 1.2766, 5.3746, 5.0672, 4.6728, 5.1384, 4.6825, 4.7420], + device='cuda:0'), covar=tensor([0.7050, 0.6624, 0.1168, 0.1977, 0.1091, 0.1633, 0.1289, 0.1633], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0307, 0.0406, 0.0408, 0.0348, 0.0410, 0.0314, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 18:02:51,717 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=126307.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:02:53,380 INFO [finetune.py:976] (0/7) Epoch 23, batch 300, loss[loss=0.1773, simple_loss=0.2428, pruned_loss=0.05584, over 4878.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2439, pruned_loss=0.04958, over 744781.85 frames. ], batch size: 32, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:03:22,878 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126354.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:03:26,334 INFO [finetune.py:976] (0/7) Epoch 23, batch 350, loss[loss=0.1524, simple_loss=0.2277, pruned_loss=0.0386, over 4138.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2455, pruned_loss=0.04936, over 792086.07 frames. ], batch size: 65, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:03:30,386 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.435e+01 1.518e+02 1.849e+02 2.150e+02 3.695e+02, threshold=3.697e+02, percent-clipped=0.0 +2023-04-27 18:03:41,556 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126380.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:03:42,138 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126381.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:03:47,073 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6964, 1.8148, 2.0115, 2.1203, 1.9275, 2.0879, 2.0886, 2.1216], + device='cuda:0'), covar=tensor([0.3532, 0.5176, 0.4162, 0.3978, 0.5157, 0.6680, 0.4657, 0.4311], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0374, 0.0324, 0.0339, 0.0346, 0.0394, 0.0356, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 18:03:54,035 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.59 vs. limit=5.0 +2023-04-27 18:03:59,739 INFO [finetune.py:976] (0/7) Epoch 23, batch 400, loss[loss=0.1823, simple_loss=0.2341, pruned_loss=0.06529, over 4399.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.245, pruned_loss=0.04916, over 827753.31 frames. 
], batch size: 19, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:04:30,797 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=126429.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:04:38,554 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4173, 1.0910, 1.2343, 1.2379, 1.5437, 1.2789, 1.0991, 1.1509], + device='cuda:0'), covar=tensor([0.1779, 0.1436, 0.1891, 0.1443, 0.0879, 0.1644, 0.1880, 0.2273], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0311, 0.0352, 0.0286, 0.0325, 0.0307, 0.0298, 0.0371], + device='cuda:0'), out_proj_covar=tensor([6.4264e-05, 6.4308e-05, 7.4199e-05, 5.7521e-05, 6.6940e-05, 6.4426e-05, + 6.2163e-05, 7.8663e-05], device='cuda:0') +2023-04-27 18:04:42,854 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126441.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:04:46,495 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8485, 4.0431, 2.7146, 4.5497, 4.0453, 3.8486, 1.5909, 3.8616], + device='cuda:0'), covar=tensor([0.1918, 0.1069, 0.3427, 0.1509, 0.3010, 0.1790, 0.6267, 0.2392], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0217, 0.0253, 0.0305, 0.0297, 0.0246, 0.0275, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 18:04:48,384 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126450.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:04:54,266 INFO [finetune.py:976] (0/7) Epoch 23, batch 450, loss[loss=0.1579, simple_loss=0.2256, pruned_loss=0.04508, over 4713.00 frames. ], tot_loss[loss=0.1709, simple_loss=0.2441, pruned_loss=0.04889, over 856582.03 frames. ], batch size: 23, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:04:58,394 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.511e+02 1.837e+02 2.263e+02 5.457e+02, threshold=3.673e+02, percent-clipped=4.0 +2023-04-27 18:05:27,628 INFO [finetune.py:976] (0/7) Epoch 23, batch 500, loss[loss=0.1357, simple_loss=0.2155, pruned_loss=0.02798, over 4742.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2422, pruned_loss=0.04848, over 878495.64 frames. ], batch size: 27, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:05:29,432 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126511.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:06:06,864 INFO [finetune.py:976] (0/7) Epoch 23, batch 550, loss[loss=0.1845, simple_loss=0.2423, pruned_loss=0.0634, over 4828.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2402, pruned_loss=0.04844, over 896250.69 frames. ], batch size: 30, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:06:16,308 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.502e+02 1.815e+02 2.160e+02 5.481e+02, threshold=3.630e+02, percent-clipped=1.0 +2023-04-27 18:06:59,472 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 18:07:12,073 INFO [finetune.py:976] (0/7) Epoch 23, batch 600, loss[loss=0.1519, simple_loss=0.2231, pruned_loss=0.04038, over 4821.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2421, pruned_loss=0.04974, over 908878.97 frames. ], batch size: 25, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:07:33,589 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-04-27 18:07:55,167 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.44 vs. limit=5.0 +2023-04-27 18:08:13,565 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4928, 1.6664, 1.4002, 1.0721, 1.1569, 1.1092, 1.4025, 1.0639], + device='cuda:0'), covar=tensor([0.1883, 0.1313, 0.1626, 0.1838, 0.2395, 0.2131, 0.1130, 0.2285], + device='cuda:0'), in_proj_covar=tensor([0.0196, 0.0210, 0.0168, 0.0204, 0.0199, 0.0185, 0.0155, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 18:08:14,762 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=126654.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:08:17,790 INFO [finetune.py:976] (0/7) Epoch 23, batch 650, loss[loss=0.1401, simple_loss=0.2019, pruned_loss=0.03913, over 4718.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2452, pruned_loss=0.05033, over 920132.66 frames. ], batch size: 23, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:08:26,640 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.220e+02 1.615e+02 1.965e+02 2.363e+02 5.710e+02, threshold=3.929e+02, percent-clipped=5.0 +2023-04-27 18:09:20,342 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=126702.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:09:24,630 INFO [finetune.py:976] (0/7) Epoch 23, batch 700, loss[loss=0.1492, simple_loss=0.2127, pruned_loss=0.04287, over 4719.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2459, pruned_loss=0.04996, over 927799.36 frames. ], batch size: 23, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:10:03,667 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126736.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:10:19,571 INFO [finetune.py:976] (0/7) Epoch 23, batch 750, loss[loss=0.182, simple_loss=0.2645, pruned_loss=0.04971, over 4898.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2468, pruned_loss=0.0499, over 933928.03 frames. ], batch size: 36, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:10:23,175 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.166e+01 1.471e+02 1.836e+02 2.076e+02 2.754e+02, threshold=3.672e+02, percent-clipped=0.0 +2023-04-27 18:10:34,009 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. 
limit=2.0 +2023-04-27 18:10:35,048 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5070, 3.3452, 0.9695, 1.8522, 1.8252, 2.4234, 1.8538, 1.0258], + device='cuda:0'), covar=tensor([0.1362, 0.0881, 0.1954, 0.1215, 0.1097, 0.0921, 0.1498, 0.1902], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0237, 0.0136, 0.0118, 0.0131, 0.0150, 0.0116, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 18:10:47,358 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6024, 3.5433, 1.1165, 1.8361, 1.9235, 2.4644, 1.9574, 1.0885], + device='cuda:0'), covar=tensor([0.1435, 0.1179, 0.1882, 0.1269, 0.1108, 0.1055, 0.1475, 0.1870], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0238, 0.0136, 0.0119, 0.0131, 0.0150, 0.0116, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 18:10:51,755 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=126806.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:10:53,513 INFO [finetune.py:976] (0/7) Epoch 23, batch 800, loss[loss=0.1239, simple_loss=0.1994, pruned_loss=0.02419, over 4787.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2446, pruned_loss=0.04888, over 936349.91 frames. ], batch size: 28, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:10:57,926 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=126816.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:11:27,335 INFO [finetune.py:976] (0/7) Epoch 23, batch 850, loss[loss=0.1606, simple_loss=0.2268, pruned_loss=0.04718, over 4862.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2428, pruned_loss=0.04853, over 940319.16 frames. ], batch size: 34, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:11:30,954 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.555e+01 1.428e+02 1.632e+02 2.009e+02 4.490e+02, threshold=3.264e+02, percent-clipped=1.0 +2023-04-27 18:11:44,244 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=126877.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:12:28,955 INFO [finetune.py:976] (0/7) Epoch 23, batch 900, loss[loss=0.179, simple_loss=0.2338, pruned_loss=0.06211, over 4706.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2401, pruned_loss=0.04805, over 943049.23 frames. ], batch size: 23, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:13:36,983 INFO [finetune.py:976] (0/7) Epoch 23, batch 950, loss[loss=0.1282, simple_loss=0.2003, pruned_loss=0.0281, over 4722.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.238, pruned_loss=0.04758, over 946420.92 frames. ], batch size: 23, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:13:40,660 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.507e+02 1.824e+02 2.265e+02 3.979e+02, threshold=3.647e+02, percent-clipped=2.0 +2023-04-27 18:14:11,164 INFO [finetune.py:976] (0/7) Epoch 23, batch 1000, loss[loss=0.1762, simple_loss=0.2498, pruned_loss=0.05132, over 4930.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2402, pruned_loss=0.04822, over 947606.23 frames. 
], batch size: 33, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:14:28,622 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127036.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:14:35,843 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7817, 2.4278, 1.8938, 1.8735, 1.3476, 1.3343, 1.9545, 1.3010], + device='cuda:0'), covar=tensor([0.1641, 0.1380, 0.1384, 0.1668, 0.2209, 0.1906, 0.0908, 0.1972], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0210, 0.0169, 0.0203, 0.0199, 0.0185, 0.0155, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 18:14:45,073 INFO [finetune.py:976] (0/7) Epoch 23, batch 1050, loss[loss=0.1733, simple_loss=0.2472, pruned_loss=0.04972, over 4818.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2438, pruned_loss=0.04941, over 947148.49 frames. ], batch size: 33, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:14:48,718 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.574e+02 1.858e+02 2.309e+02 5.197e+02, threshold=3.716e+02, percent-clipped=5.0 +2023-04-27 18:15:00,298 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=127084.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:15:16,227 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127106.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:15:18,463 INFO [finetune.py:976] (0/7) Epoch 23, batch 1100, loss[loss=0.1955, simple_loss=0.2695, pruned_loss=0.06081, over 4799.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2444, pruned_loss=0.049, over 948588.26 frames. ], batch size: 45, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:15:42,006 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.89 vs. limit=5.0 +2023-04-27 18:15:47,875 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=127154.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:15:51,853 INFO [finetune.py:976] (0/7) Epoch 23, batch 1150, loss[loss=0.1756, simple_loss=0.2461, pruned_loss=0.05258, over 4919.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2442, pruned_loss=0.04845, over 949617.67 frames. ], batch size: 42, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:15:56,440 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.449e+02 1.782e+02 2.216e+02 4.823e+02, threshold=3.563e+02, percent-clipped=1.0 +2023-04-27 18:15:58,451 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.36 vs. limit=5.0 +2023-04-27 18:16:00,785 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127172.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:16:25,336 INFO [finetune.py:976] (0/7) Epoch 23, batch 1200, loss[loss=0.1717, simple_loss=0.2437, pruned_loss=0.04988, over 4887.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2431, pruned_loss=0.04824, over 950817.68 frames. 
], batch size: 32, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:16:31,793 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6430, 1.4856, 1.3100, 1.6415, 1.9351, 1.5991, 1.4111, 1.2665], + device='cuda:0'), covar=tensor([0.1853, 0.1265, 0.1682, 0.1470, 0.0837, 0.1444, 0.1755, 0.1948], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0311, 0.0352, 0.0288, 0.0327, 0.0309, 0.0299, 0.0372], + device='cuda:0'), out_proj_covar=tensor([6.4577e-05, 6.4366e-05, 7.4265e-05, 5.8052e-05, 6.7436e-05, 6.4867e-05, + 6.2444e-05, 7.8954e-05], device='cuda:0') +2023-04-27 18:16:52,080 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127249.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:17:03,695 INFO [finetune.py:976] (0/7) Epoch 23, batch 1250, loss[loss=0.1577, simple_loss=0.2295, pruned_loss=0.04291, over 4813.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2417, pruned_loss=0.04851, over 951402.29 frames. ], batch size: 39, lr: 3.11e-03, grad_scale: 16.0 +2023-04-27 18:17:14,007 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.661e+01 1.481e+02 1.752e+02 2.149e+02 4.730e+02, threshold=3.504e+02, percent-clipped=1.0 +2023-04-27 18:17:15,357 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7744, 1.3354, 1.4842, 1.5191, 1.9021, 1.6043, 1.2536, 1.4241], + device='cuda:0'), covar=tensor([0.1880, 0.1422, 0.1983, 0.1391, 0.0920, 0.1435, 0.1976, 0.2343], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0310, 0.0352, 0.0288, 0.0326, 0.0309, 0.0299, 0.0372], + device='cuda:0'), out_proj_covar=tensor([6.4386e-05, 6.4214e-05, 7.4162e-05, 5.7955e-05, 6.7189e-05, 6.4800e-05, + 6.2323e-05, 7.8823e-05], device='cuda:0') +2023-04-27 18:17:24,264 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7647, 2.1252, 1.8338, 1.5068, 1.3198, 1.3345, 1.8308, 1.2708], + device='cuda:0'), covar=tensor([0.1715, 0.1367, 0.1468, 0.1888, 0.2372, 0.1976, 0.0970, 0.2077], + device='cuda:0'), in_proj_covar=tensor([0.0195, 0.0209, 0.0168, 0.0202, 0.0198, 0.0184, 0.0154, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 18:18:08,737 INFO [finetune.py:976] (0/7) Epoch 23, batch 1300, loss[loss=0.1601, simple_loss=0.2319, pruned_loss=0.04413, over 4823.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2391, pruned_loss=0.04776, over 953546.41 frames. ], batch size: 39, lr: 3.11e-03, grad_scale: 32.0 +2023-04-27 18:18:09,501 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127310.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:18:21,071 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7063, 1.7823, 0.7134, 1.4123, 1.9105, 1.5693, 1.4938, 1.5722], + device='cuda:0'), covar=tensor([0.0481, 0.0355, 0.0360, 0.0552, 0.0256, 0.0505, 0.0481, 0.0575], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052], + device='cuda:0') +2023-04-27 18:18:59,966 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.57 vs. 
limit=2.0 +2023-04-27 18:19:11,406 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5761, 0.7333, 1.4927, 1.9673, 1.6565, 1.4695, 1.5096, 1.4962], + device='cuda:0'), covar=tensor([0.3852, 0.5872, 0.5086, 0.4944, 0.4963, 0.6256, 0.6776, 0.7570], + device='cuda:0'), in_proj_covar=tensor([0.0435, 0.0418, 0.0510, 0.0509, 0.0465, 0.0495, 0.0500, 0.0511], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 18:19:13,548 INFO [finetune.py:976] (0/7) Epoch 23, batch 1350, loss[loss=0.1612, simple_loss=0.2402, pruned_loss=0.0411, over 4907.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2388, pruned_loss=0.0473, over 954450.74 frames. ], batch size: 43, lr: 3.11e-03, grad_scale: 32.0 +2023-04-27 18:19:14,880 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4090, 1.9539, 2.2290, 2.6337, 2.6183, 2.1721, 2.0177, 2.3599], + device='cuda:0'), covar=tensor([0.0694, 0.1036, 0.0651, 0.0523, 0.0504, 0.0785, 0.0657, 0.0482], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0200, 0.0182, 0.0172, 0.0175, 0.0179, 0.0149, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 18:19:23,394 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.776e+01 1.488e+02 1.772e+02 2.253e+02 3.450e+02, threshold=3.544e+02, percent-clipped=0.0 +2023-04-27 18:19:58,531 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9839, 1.8958, 1.7613, 1.5938, 2.1755, 1.7529, 2.6573, 1.5783], + device='cuda:0'), covar=tensor([0.3717, 0.1981, 0.4787, 0.3108, 0.1665, 0.2636, 0.1263, 0.4519], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0347, 0.0426, 0.0350, 0.0376, 0.0374, 0.0369, 0.0417], + device='cuda:0'), out_proj_covar=tensor([9.9775e-05, 1.0367e-04, 1.2913e-04, 1.0526e-04, 1.1192e-04, 1.1139e-04, + 1.0834e-04, 1.2575e-04], device='cuda:0') +2023-04-27 18:20:19,978 INFO [finetune.py:976] (0/7) Epoch 23, batch 1400, loss[loss=0.1578, simple_loss=0.2443, pruned_loss=0.03565, over 4805.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2399, pruned_loss=0.04756, over 952059.14 frames. ], batch size: 51, lr: 3.11e-03, grad_scale: 32.0 +2023-04-27 18:20:42,632 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127427.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 18:20:51,544 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-27 18:20:53,952 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7305, 1.5510, 2.0293, 2.0585, 1.5534, 1.4183, 1.6142, 1.0099], + device='cuda:0'), covar=tensor([0.0459, 0.0627, 0.0331, 0.0484, 0.0660, 0.1067, 0.0584, 0.0612], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0068, 0.0067, 0.0068, 0.0075, 0.0096, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 18:21:24,264 INFO [finetune.py:976] (0/7) Epoch 23, batch 1450, loss[loss=0.1576, simple_loss=0.2304, pruned_loss=0.0424, over 4770.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2421, pruned_loss=0.04798, over 952944.88 frames. 
], batch size: 28, lr: 3.11e-03, grad_scale: 32.0 +2023-04-27 18:21:34,073 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.543e+02 1.916e+02 2.299e+02 4.494e+02, threshold=3.833e+02, percent-clipped=8.0 +2023-04-27 18:21:43,898 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127472.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:22:06,034 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127488.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 18:22:29,895 INFO [finetune.py:976] (0/7) Epoch 23, batch 1500, loss[loss=0.1634, simple_loss=0.2414, pruned_loss=0.04272, over 4902.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2439, pruned_loss=0.04879, over 954340.23 frames. ], batch size: 37, lr: 3.11e-03, grad_scale: 32.0 +2023-04-27 18:22:37,627 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=127520.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:22:43,183 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127529.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:23:19,694 INFO [finetune.py:976] (0/7) Epoch 23, batch 1550, loss[loss=0.1382, simple_loss=0.2316, pruned_loss=0.02242, over 4756.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2436, pruned_loss=0.04847, over 953801.03 frames. ], batch size: 28, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:23:23,359 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1785, 1.6159, 2.0044, 2.2205, 2.0028, 1.6065, 1.1529, 1.7106], + device='cuda:0'), covar=tensor([0.3202, 0.3371, 0.1654, 0.2056, 0.2543, 0.2784, 0.4342, 0.2133], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0246, 0.0228, 0.0316, 0.0220, 0.0235, 0.0228, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 18:23:23,842 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.538e+02 1.819e+02 2.091e+02 4.341e+02, threshold=3.638e+02, percent-clipped=1.0 +2023-04-27 18:23:25,224 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0205, 2.5314, 2.1538, 2.4444, 1.8242, 2.1159, 2.1649, 1.6013], + device='cuda:0'), covar=tensor([0.2283, 0.1180, 0.0918, 0.1251, 0.3506, 0.1335, 0.1986, 0.2935], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0297, 0.0213, 0.0275, 0.0311, 0.0254, 0.0246, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1302e-04, 1.1767e-04, 8.4137e-05, 1.0843e-04, 1.2590e-04, 1.0030e-04, + 9.9461e-05, 1.0283e-04], device='cuda:0') +2023-04-27 18:23:41,772 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127590.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:23:50,929 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127605.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:23:53,256 INFO [finetune.py:976] (0/7) Epoch 23, batch 1600, loss[loss=0.1666, simple_loss=0.2386, pruned_loss=0.04734, over 4890.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2421, pruned_loss=0.04797, over 954845.81 frames. ], batch size: 32, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:24:26,892 INFO [finetune.py:976] (0/7) Epoch 23, batch 1650, loss[loss=0.1394, simple_loss=0.2151, pruned_loss=0.03189, over 4850.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2401, pruned_loss=0.04789, over 955376.08 frames. 
], batch size: 25, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:24:28,858 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127662.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:24:31,043 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.420e+02 1.883e+02 2.201e+02 3.441e+02, threshold=3.766e+02, percent-clipped=0.0 +2023-04-27 18:25:00,768 INFO [finetune.py:976] (0/7) Epoch 23, batch 1700, loss[loss=0.2598, simple_loss=0.2965, pruned_loss=0.1115, over 4071.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2391, pruned_loss=0.04826, over 954961.29 frames. ], batch size: 65, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:25:10,321 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127723.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:25:16,292 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1469, 2.5636, 1.1387, 1.4130, 2.0920, 1.2899, 3.2758, 1.7847], + device='cuda:0'), covar=tensor([0.0598, 0.0507, 0.0709, 0.1286, 0.0450, 0.1011, 0.0287, 0.0581], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 18:25:34,587 INFO [finetune.py:976] (0/7) Epoch 23, batch 1750, loss[loss=0.187, simple_loss=0.2732, pruned_loss=0.05043, over 4815.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.241, pruned_loss=0.04873, over 953504.39 frames. ], batch size: 51, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:25:38,238 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.809e+01 1.624e+02 1.849e+02 2.197e+02 5.063e+02, threshold=3.698e+02, percent-clipped=3.0 +2023-04-27 18:25:48,410 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127779.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:25:50,830 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127783.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 18:26:14,170 INFO [finetune.py:976] (0/7) Epoch 23, batch 1800, loss[loss=0.1385, simple_loss=0.2057, pruned_loss=0.03566, over 4719.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2427, pruned_loss=0.0491, over 954388.94 frames. ], batch size: 23, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:26:57,468 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127840.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:27:09,575 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127850.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:27:21,426 INFO [finetune.py:976] (0/7) Epoch 23, batch 1850, loss[loss=0.1686, simple_loss=0.2481, pruned_loss=0.04451, over 4816.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2448, pruned_loss=0.04953, over 953744.83 frames. 
], batch size: 39, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:27:31,374 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.092e+02 1.632e+02 1.910e+02 2.193e+02 3.676e+02, threshold=3.820e+02, percent-clipped=0.0 +2023-04-27 18:27:39,312 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0054, 1.3416, 1.2738, 1.5881, 1.4823, 1.5003, 1.2879, 2.4638], + device='cuda:0'), covar=tensor([0.0673, 0.0837, 0.0842, 0.1222, 0.0670, 0.0509, 0.0763, 0.0230], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 18:27:40,539 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=127872.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:27:49,864 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=127885.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:27:58,317 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4315, 2.2562, 2.5661, 2.8709, 2.7742, 2.2287, 1.9302, 2.4593], + device='cuda:0'), covar=tensor([0.0791, 0.0941, 0.0605, 0.0591, 0.0632, 0.0882, 0.0788, 0.0564], + device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0201, 0.0184, 0.0174, 0.0177, 0.0181, 0.0151, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 18:28:03,539 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=127905.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:28:06,380 INFO [finetune.py:976] (0/7) Epoch 23, batch 1900, loss[loss=0.1993, simple_loss=0.2768, pruned_loss=0.06088, over 4888.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2466, pruned_loss=0.04995, over 955017.92 frames. ], batch size: 35, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:28:07,683 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127911.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:28:22,806 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-27 18:28:31,637 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7466, 2.1968, 2.4589, 3.2224, 2.4848, 2.0222, 1.9094, 2.5380], + device='cuda:0'), covar=tensor([0.3014, 0.3032, 0.1694, 0.2146, 0.2621, 0.2603, 0.3557, 0.1863], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0247, 0.0230, 0.0318, 0.0222, 0.0236, 0.0229, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 18:28:32,855 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=127933.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:28:57,592 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=127953.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:29:07,612 INFO [finetune.py:976] (0/7) Epoch 23, batch 1950, loss[loss=0.1534, simple_loss=0.2263, pruned_loss=0.04021, over 4843.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2452, pruned_loss=0.04947, over 953181.38 frames. 
], batch size: 44, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:29:16,663 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.063e+02 1.493e+02 1.855e+02 2.257e+02 4.055e+02, threshold=3.710e+02, percent-clipped=1.0 +2023-04-27 18:29:26,243 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-27 18:29:29,377 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.39 vs. limit=2.0 +2023-04-27 18:29:53,168 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1543, 1.8692, 2.2983, 2.5767, 2.2110, 2.0559, 2.1739, 2.1415], + device='cuda:0'), covar=tensor([0.4659, 0.6779, 0.7223, 0.5621, 0.6087, 0.8235, 0.8660, 0.9447], + device='cuda:0'), in_proj_covar=tensor([0.0433, 0.0417, 0.0507, 0.0505, 0.0463, 0.0492, 0.0496, 0.0508], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 18:29:54,191 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-128000.pt +2023-04-27 18:30:02,324 INFO [finetune.py:976] (0/7) Epoch 23, batch 2000, loss[loss=0.1527, simple_loss=0.2127, pruned_loss=0.04637, over 4805.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2424, pruned_loss=0.04845, over 955282.28 frames. ], batch size: 51, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:30:07,955 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128018.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:30:35,018 INFO [finetune.py:976] (0/7) Epoch 23, batch 2050, loss[loss=0.156, simple_loss=0.2283, pruned_loss=0.04181, over 4823.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2391, pruned_loss=0.04766, over 955631.96 frames. ], batch size: 33, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:30:39,640 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.902e+01 1.493e+02 1.754e+02 2.070e+02 4.110e+02, threshold=3.508e+02, percent-clipped=2.0 +2023-04-27 18:30:45,244 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7347, 1.4291, 4.6411, 4.2946, 4.0482, 4.4135, 4.2666, 4.0113], + device='cuda:0'), covar=tensor([0.6687, 0.5991, 0.1093, 0.1932, 0.1122, 0.1871, 0.1007, 0.1836], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0307, 0.0408, 0.0410, 0.0350, 0.0412, 0.0317, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 18:30:50,718 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128083.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 18:31:00,262 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-04-27 18:31:08,824 INFO [finetune.py:976] (0/7) Epoch 23, batch 2100, loss[loss=0.1272, simple_loss=0.211, pruned_loss=0.02168, over 4818.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2379, pruned_loss=0.04718, over 956171.19 frames. ], batch size: 51, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:31:22,424 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. 
limit=2.0 +2023-04-27 18:31:22,851 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=128131.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 18:31:25,338 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128135.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:31:42,234 INFO [finetune.py:976] (0/7) Epoch 23, batch 2150, loss[loss=0.1936, simple_loss=0.2516, pruned_loss=0.06777, over 4895.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.242, pruned_loss=0.0485, over 954439.53 frames. ], batch size: 32, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:31:46,840 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.634e+02 1.958e+02 2.421e+02 4.804e+02, threshold=3.916e+02, percent-clipped=1.0 +2023-04-27 18:31:50,527 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8303, 2.8368, 2.1924, 3.2485, 2.8264, 2.8105, 1.1582, 2.8252], + device='cuda:0'), covar=tensor([0.2202, 0.1807, 0.3742, 0.3315, 0.4111, 0.2319, 0.5944, 0.2923], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0218, 0.0253, 0.0306, 0.0297, 0.0246, 0.0275, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 18:31:50,711 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 18:31:59,121 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128185.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:32:02,646 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6425, 1.5549, 1.9469, 1.9926, 1.4584, 1.2134, 1.5887, 0.8493], + device='cuda:0'), covar=tensor([0.0787, 0.0742, 0.0512, 0.0849, 0.0819, 0.1041, 0.0764, 0.0762], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0067, 0.0068, 0.0075, 0.0096, 0.0073, 0.0065], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 18:32:09,953 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9081, 1.6967, 2.0859, 2.2794, 1.9255, 1.8235, 1.9207, 1.8879], + device='cuda:0'), covar=tensor([0.4695, 0.7217, 0.7259, 0.6137, 0.6371, 0.8505, 0.8945, 1.0212], + device='cuda:0'), in_proj_covar=tensor([0.0434, 0.0418, 0.0509, 0.0506, 0.0464, 0.0494, 0.0499, 0.0509], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 18:32:13,545 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128206.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:32:15,343 INFO [finetune.py:976] (0/7) Epoch 23, batch 2200, loss[loss=0.1871, simple_loss=0.2613, pruned_loss=0.05652, over 4819.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2452, pruned_loss=0.04948, over 951938.72 frames. ], batch size: 30, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:32:20,202 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-27 18:32:39,406 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128228.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:32:42,383 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=128233.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:32:48,201 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. 
limit=5.0 +2023-04-27 18:33:12,838 INFO [finetune.py:976] (0/7) Epoch 23, batch 2250, loss[loss=0.1428, simple_loss=0.2093, pruned_loss=0.0381, over 4027.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2452, pruned_loss=0.04924, over 951695.85 frames. ], batch size: 16, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:33:22,487 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.137e+02 1.532e+02 1.818e+02 2.293e+02 4.621e+02, threshold=3.635e+02, percent-clipped=2.0 +2023-04-27 18:33:57,140 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8180, 2.0316, 1.1237, 1.5810, 2.1532, 1.6505, 1.6440, 1.7500], + device='cuda:0'), covar=tensor([0.0466, 0.0341, 0.0277, 0.0520, 0.0221, 0.0483, 0.0478, 0.0519], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 18:34:26,340 INFO [finetune.py:976] (0/7) Epoch 23, batch 2300, loss[loss=0.1649, simple_loss=0.2432, pruned_loss=0.04323, over 4882.00 frames. ], tot_loss[loss=0.1724, simple_loss=0.2458, pruned_loss=0.0495, over 954601.16 frames. ], batch size: 43, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:34:37,304 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128318.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:34:37,957 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7459, 1.5538, 1.9739, 2.0593, 1.5490, 1.4163, 1.6618, 1.0659], + device='cuda:0'), covar=tensor([0.0461, 0.0768, 0.0427, 0.0639, 0.0837, 0.1167, 0.0678, 0.0643], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0068, 0.0067, 0.0068, 0.0075, 0.0095, 0.0073, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 18:34:39,027 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128320.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:35:34,259 INFO [finetune.py:976] (0/7) Epoch 23, batch 2350, loss[loss=0.1339, simple_loss=0.2067, pruned_loss=0.03059, over 4258.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2439, pruned_loss=0.04923, over 952836.16 frames. ], batch size: 65, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:35:37,973 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.560e+02 1.785e+02 2.201e+02 3.854e+02, threshold=3.569e+02, percent-clipped=2.0 +2023-04-27 18:35:38,685 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=128366.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:36:01,121 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128381.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:36:31,442 INFO [finetune.py:976] (0/7) Epoch 23, batch 2400, loss[loss=0.2015, simple_loss=0.2679, pruned_loss=0.06756, over 4929.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2416, pruned_loss=0.04895, over 953265.45 frames. 
], batch size: 38, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:36:34,502 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128413.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:36:58,488 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1079, 3.0695, 0.8874, 1.3901, 1.5434, 2.1317, 1.6734, 0.9912], + device='cuda:0'), covar=tensor([0.2161, 0.1500, 0.2444, 0.2059, 0.1588, 0.1483, 0.1939, 0.2186], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0238, 0.0137, 0.0119, 0.0131, 0.0150, 0.0117, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 18:37:01,388 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128435.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:37:10,127 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0868, 1.4190, 1.3131, 1.7625, 1.5863, 1.6955, 1.3273, 3.0061], + device='cuda:0'), covar=tensor([0.0695, 0.0829, 0.0841, 0.1166, 0.0662, 0.0512, 0.0773, 0.0210], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 18:37:29,232 INFO [finetune.py:976] (0/7) Epoch 23, batch 2450, loss[loss=0.1372, simple_loss=0.2103, pruned_loss=0.03206, over 4923.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2389, pruned_loss=0.04817, over 956704.69 frames. ], batch size: 37, lr: 3.10e-03, grad_scale: 32.0 +2023-04-27 18:37:29,357 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6602, 2.1640, 1.7620, 2.0636, 1.5921, 1.7599, 1.7080, 1.3438], + device='cuda:0'), covar=tensor([0.1791, 0.1165, 0.0821, 0.1075, 0.3153, 0.1132, 0.1910, 0.2434], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0297, 0.0212, 0.0274, 0.0311, 0.0253, 0.0246, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1278e-04, 1.1769e-04, 8.3678e-05, 1.0793e-04, 1.2563e-04, 1.0008e-04, + 9.9110e-05, 1.0262e-04], device='cuda:0') +2023-04-27 18:37:35,181 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.697e+01 1.581e+02 1.977e+02 2.297e+02 4.168e+02, threshold=3.955e+02, percent-clipped=2.0 +2023-04-27 18:37:46,720 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128474.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:37:56,933 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=128483.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:38:27,869 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128506.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 18:38:29,611 INFO [finetune.py:976] (0/7) Epoch 23, batch 2500, loss[loss=0.1449, simple_loss=0.227, pruned_loss=0.03142, over 4792.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2401, pruned_loss=0.04832, over 957472.46 frames. 
], batch size: 29, lr: 3.10e-03, grad_scale: 32.0
+2023-04-27 18:38:52,077 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4584, 1.3264, 1.8725, 1.8288, 1.3178, 1.1758, 1.5323, 0.8916],
+ device='cuda:0'), covar=tensor([0.0602, 0.0703, 0.0360, 0.0667, 0.0722, 0.1115, 0.0582, 0.0745],
+ device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0068, 0.0066, 0.0068, 0.0075, 0.0095, 0.0073, 0.0064],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 18:38:52,853 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.45 vs. limit=5.0
+2023-04-27 18:38:53,248 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128528.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:39:01,141 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2730, 2.9960, 1.1412, 1.6225, 2.3785, 1.2438, 3.9481, 1.9042],
+ device='cuda:0'), covar=tensor([0.0725, 0.0794, 0.0884, 0.1258, 0.0458, 0.1126, 0.0260, 0.0627],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0073, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 18:39:10,947 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5948, 1.7413, 0.7650, 1.2802, 1.8822, 1.4375, 1.3625, 1.4589],
+ device='cuda:0'), covar=tensor([0.0477, 0.0351, 0.0344, 0.0545, 0.0252, 0.0486, 0.0506, 0.0554],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 18:39:29,764 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=128554.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:39:32,694 INFO [finetune.py:976] (0/7) Epoch 23, batch 2550, loss[loss=0.2158, simple_loss=0.2933, pruned_loss=0.06912, over 4873.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2442, pruned_loss=0.0495, over 957417.89 frames. ], batch size: 34, lr: 3.10e-03, grad_scale: 32.0
+2023-04-27 18:39:42,963 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.730e+02 1.875e+02 2.334e+02 4.772e+02, threshold=3.751e+02, percent-clipped=1.0
+2023-04-27 18:39:55,793 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=128576.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:40:15,099 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2493, 1.4851, 1.7356, 1.8231, 1.8116, 1.8248, 1.7739, 1.7806],
+ device='cuda:0'), covar=tensor([0.3891, 0.4930, 0.4225, 0.4125, 0.5127, 0.6804, 0.4530, 0.4795],
+ device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0373, 0.0324, 0.0337, 0.0346, 0.0394, 0.0356, 0.0329],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 18:40:40,367 INFO [finetune.py:976] (0/7) Epoch 23, batch 2600, loss[loss=0.2376, simple_loss=0.2896, pruned_loss=0.09281, over 4912.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2447, pruned_loss=0.0496, over 956563.58 frames.
], batch size: 36, lr: 3.10e-03, grad_scale: 32.0
+2023-04-27 18:41:06,815 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4767, 2.8844, 0.9572, 1.6370, 2.3035, 1.4349, 4.0733, 2.0085],
+ device='cuda:0'), covar=tensor([0.0664, 0.0849, 0.0891, 0.1255, 0.0489, 0.1029, 0.0222, 0.0600],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0047, 0.0050, 0.0052, 0.0073, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 18:41:07,418 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9086, 1.1654, 4.9739, 4.6317, 4.3023, 4.7985, 4.4361, 4.3339],
+ device='cuda:0'), covar=tensor([0.6952, 0.6408, 0.1051, 0.2021, 0.1175, 0.1087, 0.1544, 0.1888],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0306, 0.0406, 0.0409, 0.0350, 0.0411, 0.0315, 0.0366],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 18:41:08,036 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3042, 1.3347, 1.4117, 1.5539, 1.6636, 1.2089, 0.8433, 1.4423],
+ device='cuda:0'), covar=tensor([0.0823, 0.1192, 0.0879, 0.0586, 0.0632, 0.0928, 0.0892, 0.0612],
+ device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0202, 0.0184, 0.0173, 0.0177, 0.0181, 0.0151, 0.0178],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 18:41:19,798 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0
+2023-04-27 18:41:44,291 INFO [finetune.py:976] (0/7) Epoch 23, batch 2650, loss[loss=0.1688, simple_loss=0.2423, pruned_loss=0.04768, over 4822.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2456, pruned_loss=0.04929, over 956586.74 frames. ], batch size: 39, lr: 3.10e-03, grad_scale: 32.0
+2023-04-27 18:41:53,483 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.191e+02 1.651e+02 1.881e+02 2.286e+02 5.160e+02, threshold=3.761e+02, percent-clipped=1.0
+2023-04-27 18:42:11,800 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128676.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:42:55,229 INFO [finetune.py:976] (0/7) Epoch 23, batch 2700, loss[loss=0.1753, simple_loss=0.2464, pruned_loss=0.05213, over 4822.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2431, pruned_loss=0.04793, over 954387.73 frames. ], batch size: 41, lr: 3.10e-03, grad_scale: 32.0
+2023-04-27 18:44:00,967 INFO [finetune.py:976] (0/7) Epoch 23, batch 2750, loss[loss=0.1789, simple_loss=0.2478, pruned_loss=0.05501, over 4707.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2405, pruned_loss=0.04697, over 954538.94 frames. ], batch size: 59, lr: 3.10e-03, grad_scale: 32.0
+2023-04-27 18:44:04,649 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.255e+01 1.547e+02 1.823e+02 2.138e+02 3.437e+02, threshold=3.646e+02, percent-clipped=0.0
+2023-04-27 18:44:12,837 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=128769.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:44:27,143 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128781.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:45:09,448 INFO [finetune.py:976] (0/7) Epoch 23, batch 2800, loss[loss=0.167, simple_loss=0.2352, pruned_loss=0.04938, over 4701.00 frames.
], tot_loss[loss=0.1665, simple_loss=0.2387, pruned_loss=0.04714, over 955722.41 frames. ], batch size: 23, lr: 3.10e-03, grad_scale: 32.0
+2023-04-27 18:45:33,764 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7791, 1.5539, 0.6981, 1.4753, 1.6080, 1.5976, 1.5447, 1.5464],
+ device='cuda:0'), covar=tensor([0.0508, 0.0362, 0.0393, 0.0535, 0.0287, 0.0478, 0.0509, 0.0563],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0024, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 18:45:55,099 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128842.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:46:17,709 INFO [finetune.py:976] (0/7) Epoch 23, batch 2850, loss[loss=0.1417, simple_loss=0.2149, pruned_loss=0.03422, over 4762.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2383, pruned_loss=0.04747, over 955669.31 frames. ], batch size: 26, lr: 3.10e-03, grad_scale: 32.0
+2023-04-27 18:46:22,646 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.467e+02 1.826e+02 2.202e+02 4.194e+02, threshold=3.652e+02, percent-clipped=1.0
+2023-04-27 18:46:33,839 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.4339, 4.3590, 3.1035, 5.0840, 4.3906, 4.3437, 1.7874, 4.4204],
+ device='cuda:0'), covar=tensor([0.1595, 0.1052, 0.3380, 0.0951, 0.3750, 0.1747, 0.5768, 0.1979],
+ device='cuda:0'), in_proj_covar=tensor([0.0243, 0.0217, 0.0251, 0.0304, 0.0294, 0.0245, 0.0272, 0.0271],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 18:46:48,980 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9807, 1.7689, 2.3592, 2.4311, 1.7342, 1.6257, 1.9476, 1.0000],
+ device='cuda:0'), covar=tensor([0.0581, 0.0628, 0.0400, 0.0651, 0.0757, 0.0997, 0.0597, 0.0704],
+ device='cuda:0'), in_proj_covar=tensor([0.0068, 0.0067, 0.0065, 0.0067, 0.0074, 0.0094, 0.0072, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 18:46:50,209 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=128890.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 18:47:11,862 INFO [finetune.py:976] (0/7) Epoch 23, batch 2900, loss[loss=0.1786, simple_loss=0.266, pruned_loss=0.04559, over 4923.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2407, pruned_loss=0.04814, over 955582.86 frames. ], batch size: 38, lr: 3.09e-03, grad_scale: 32.0
+2023-04-27 18:47:28,308 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8715, 1.0381, 1.5890, 1.6701, 1.6530, 1.6966, 1.5830, 1.5689],
+ device='cuda:0'), covar=tensor([0.4626, 0.5320, 0.4304, 0.4245, 0.5455, 0.6867, 0.4765, 0.4540],
+ device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0372, 0.0324, 0.0336, 0.0346, 0.0392, 0.0355, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 18:47:56,517 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=128951.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 18:48:04,747 INFO [finetune.py:976] (0/7) Epoch 23, batch 2950, loss[loss=0.1395, simple_loss=0.2085, pruned_loss=0.0352, over 4705.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2426, pruned_loss=0.04835, over 955009.08 frames.
], batch size: 23, lr: 3.09e-03, grad_scale: 32.0
+2023-04-27 18:48:10,254 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.058e+02 1.560e+02 1.806e+02 2.158e+02 5.289e+02, threshold=3.612e+02, percent-clipped=5.0
+2023-04-27 18:48:22,809 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=128976.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:49:01,104 INFO [finetune.py:976] (0/7) Epoch 23, batch 3000, loss[loss=0.1817, simple_loss=0.2571, pruned_loss=0.05318, over 4834.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2454, pruned_loss=0.04956, over 955559.82 frames. ], batch size: 49, lr: 3.09e-03, grad_scale: 32.0
+2023-04-27 18:49:01,106 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 18:49:17,638 INFO [finetune.py:1010] (0/7) Epoch 23, validation: loss=0.1527, simple_loss=0.2222, pruned_loss=0.04158, over 2265189.00 frames.
+2023-04-27 18:49:17,639 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 18:49:31,984 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=129024.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:50:14,477 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4503, 1.3766, 4.0767, 3.7944, 3.5330, 3.9628, 3.8759, 3.5297],
+ device='cuda:0'), covar=tensor([0.6924, 0.5697, 0.1029, 0.1772, 0.1172, 0.1550, 0.1531, 0.1699],
+ device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0306, 0.0406, 0.0410, 0.0349, 0.0411, 0.0315, 0.0367],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 18:50:16,854 INFO [finetune.py:976] (0/7) Epoch 23, batch 3050, loss[loss=0.1581, simple_loss=0.2289, pruned_loss=0.04361, over 4801.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2469, pruned_loss=0.05041, over 954986.04 frames. ], batch size: 25, lr: 3.09e-03, grad_scale: 32.0
+2023-04-27 18:50:25,142 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.682e+02 1.957e+02 2.360e+02 4.958e+02, threshold=3.913e+02, percent-clipped=5.0
+2023-04-27 18:50:25,919 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2737, 2.0051, 2.3832, 2.7363, 2.3313, 2.2191, 2.3250, 2.1307],
+ device='cuda:0'), covar=tensor([0.4191, 0.5839, 0.5707, 0.4706, 0.5381, 0.7434, 0.6854, 0.7462],
+ device='cuda:0'), in_proj_covar=tensor([0.0435, 0.0419, 0.0512, 0.0508, 0.0465, 0.0496, 0.0499, 0.0511],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 18:50:34,466 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129069.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:51:23,635 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-04-27 18:51:30,733 INFO [finetune.py:976] (0/7) Epoch 23, batch 3100, loss[loss=0.22, simple_loss=0.2692, pruned_loss=0.08541, over 4907.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.244, pruned_loss=0.04916, over 954976.01 frames.
], batch size: 32, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 18:51:35,657 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=129117.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:52:07,896 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129137.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:52:29,957 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4983, 3.2928, 0.8530, 1.8238, 1.9296, 2.3199, 1.8969, 0.9348],
+ device='cuda:0'), covar=tensor([0.1279, 0.0868, 0.1972, 0.1152, 0.0929, 0.0972, 0.1353, 0.1851],
+ device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0235, 0.0135, 0.0118, 0.0130, 0.0149, 0.0116, 0.0117],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 18:52:39,820 INFO [finetune.py:976] (0/7) Epoch 23, batch 3150, loss[loss=0.1536, simple_loss=0.2252, pruned_loss=0.04099, over 4911.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2416, pruned_loss=0.04902, over 954747.21 frames. ], batch size: 35, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 18:52:50,085 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.492e+02 1.784e+02 2.330e+02 3.707e+02, threshold=3.568e+02, percent-clipped=0.0
+2023-04-27 18:53:34,414 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0764, 2.3877, 1.1115, 1.4224, 1.9182, 1.1731, 3.0959, 1.6636],
+ device='cuda:0'), covar=tensor([0.0670, 0.0671, 0.0804, 0.1152, 0.0503, 0.1004, 0.0238, 0.0605],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 18:53:46,919 INFO [finetune.py:976] (0/7) Epoch 23, batch 3200, loss[loss=0.143, simple_loss=0.2152, pruned_loss=0.0354, over 4872.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2381, pruned_loss=0.04742, over 957461.36 frames. ], batch size: 31, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 18:54:24,526 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-04-27 18:54:35,396 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9859, 2.3432, 2.0238, 2.2875, 1.7638, 2.0060, 1.9050, 1.7102],
+ device='cuda:0'), covar=tensor([0.1609, 0.1141, 0.0715, 0.1063, 0.2784, 0.1030, 0.1746, 0.2249],
+ device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0299, 0.0213, 0.0274, 0.0312, 0.0255, 0.0247, 0.0262],
+ device='cuda:0'), out_proj_covar=tensor([1.1297e-04, 1.1842e-04, 8.4026e-05, 1.0813e-04, 1.2593e-04, 1.0073e-04,
+ 9.9574e-05, 1.0333e-04], device='cuda:0')
+2023-04-27 18:54:35,960 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129246.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 18:54:55,918 INFO [finetune.py:976] (0/7) Epoch 23, batch 3250, loss[loss=0.2387, simple_loss=0.2993, pruned_loss=0.08906, over 4815.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2387, pruned_loss=0.04776, over 955879.24 frames. ], batch size: 41, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 18:55:06,949 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.754e+01 1.533e+02 1.786e+02 2.239e+02 3.821e+02, threshold=3.572e+02, percent-clipped=2.0
+2023-04-27 18:55:51,326 INFO [finetune.py:976] (0/7) Epoch 23, batch 3300, loss[loss=0.182, simple_loss=0.2669, pruned_loss=0.04856, over 4868.00 frames.
], tot_loss[loss=0.1715, simple_loss=0.2437, pruned_loss=0.04968, over 954162.39 frames. ], batch size: 34, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 18:56:48,062 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.4622, 1.4301, 1.4241, 1.0896, 1.4357, 1.2403, 1.7415, 1.3504],
+ device='cuda:0'), covar=tensor([0.3682, 0.1720, 0.4999, 0.2538, 0.1518, 0.2161, 0.1425, 0.4536],
+ device='cuda:0'), in_proj_covar=tensor([0.0335, 0.0346, 0.0424, 0.0347, 0.0374, 0.0371, 0.0364, 0.0415],
+ device='cuda:0'), out_proj_covar=tensor([9.9352e-05, 1.0344e-04, 1.2843e-04, 1.0443e-04, 1.1110e-04, 1.1061e-04,
+ 1.0692e-04, 1.2496e-04], device='cuda:0')
+2023-04-27 18:56:51,618 INFO [finetune.py:976] (0/7) Epoch 23, batch 3350, loss[loss=0.1757, simple_loss=0.2524, pruned_loss=0.04945, over 4895.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2455, pruned_loss=0.04995, over 954558.63 frames. ], batch size: 37, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 18:57:01,578 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3327, 1.5194, 1.5334, 1.7887, 1.6194, 1.7724, 1.3978, 3.4512],
+ device='cuda:0'), covar=tensor([0.0628, 0.0934, 0.0867, 0.1333, 0.0745, 0.0534, 0.0903, 0.0197],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0012, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 18:57:02,541 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.708e+02 1.958e+02 2.263e+02 4.173e+02, threshold=3.917e+02, percent-clipped=1.0
+2023-04-27 18:57:34,703 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8009, 3.7786, 2.7902, 4.4195, 3.8925, 3.7726, 1.7924, 3.8068],
+ device='cuda:0'), covar=tensor([0.1707, 0.1208, 0.2919, 0.1515, 0.2846, 0.1693, 0.5640, 0.2251],
+ device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0217, 0.0252, 0.0304, 0.0295, 0.0245, 0.0272, 0.0272],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 18:57:58,595 INFO [finetune.py:976] (0/7) Epoch 23, batch 3400, loss[loss=0.2395, simple_loss=0.3034, pruned_loss=0.08776, over 4254.00 frames. ], tot_loss[loss=0.1738, simple_loss=0.2466, pruned_loss=0.05052, over 955712.92 frames. ], batch size: 65, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 18:58:06,878 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129413.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 18:58:08,710 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0
+2023-04-27 18:58:40,251 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129437.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 18:59:06,101 INFO [finetune.py:976] (0/7) Epoch 23, batch 3450, loss[loss=0.1518, simple_loss=0.2162, pruned_loss=0.04377, over 4401.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.2468, pruned_loss=0.05047, over 956585.58 frames.
], batch size: 19, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 18:59:15,874 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.992e+01 1.559e+02 1.867e+02 2.155e+02 3.707e+02, threshold=3.734e+02, percent-clipped=0.0
+2023-04-27 18:59:26,977 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129474.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 18:59:39,674 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=129485.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:00:05,976 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-04-27 19:00:07,510 INFO [finetune.py:976] (0/7) Epoch 23, batch 3500, loss[loss=0.1447, simple_loss=0.2122, pruned_loss=0.03859, over 4735.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2444, pruned_loss=0.04977, over 956502.13 frames. ], batch size: 23, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:00:40,768 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2183, 1.4442, 1.6609, 1.8271, 1.6925, 1.8168, 1.7314, 1.7570],
+ device='cuda:0'), covar=tensor([0.3723, 0.4591, 0.3896, 0.3341, 0.4976, 0.6525, 0.4165, 0.3982],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0377, 0.0329, 0.0340, 0.0349, 0.0397, 0.0359, 0.0332],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 19:00:46,471 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=129546.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 19:00:47,225 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0
+2023-04-27 19:01:00,755 INFO [finetune.py:976] (0/7) Epoch 23, batch 3550, loss[loss=0.1549, simple_loss=0.2251, pruned_loss=0.04234, over 4912.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2417, pruned_loss=0.04896, over 957364.29 frames. ], batch size: 43, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:01:05,537 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.87 vs. limit=5.0
+2023-04-27 19:01:08,386 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.192e+01 1.610e+02 1.893e+02 2.308e+02 5.470e+02, threshold=3.785e+02, percent-clipped=3.0
+2023-04-27 19:01:38,374 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=129594.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 19:01:57,767 INFO [finetune.py:976] (0/7) Epoch 23, batch 3600, loss[loss=0.1757, simple_loss=0.2467, pruned_loss=0.05232, over 4870.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2398, pruned_loss=0.04807, over 957765.46 frames. ], batch size: 34, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:01:59,677 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129612.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:02:16,866 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6751, 1.9356, 5.6357, 5.3418, 5.0655, 5.4133, 5.0098, 4.9737],
+ device='cuda:0'), covar=tensor([0.5760, 0.5817, 0.0927, 0.1580, 0.0997, 0.1618, 0.1119, 0.1620],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0306, 0.0407, 0.0406, 0.0347, 0.0408, 0.0315, 0.0365],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 19:02:47,465 INFO [finetune.py:976] (0/7) Epoch 23, batch 3650, loss[loss=0.1293, simple_loss=0.2061, pruned_loss=0.02624, over 4794.00 frames.
], tot_loss[loss=0.1685, simple_loss=0.2406, pruned_loss=0.04819, over 956816.98 frames. ], batch size: 25, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:02:51,891 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.776e+01 1.523e+02 1.795e+02 2.282e+02 4.473e+02, threshold=3.590e+02, percent-clipped=4.0
+2023-04-27 19:03:02,055 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129673.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:03:55,981 INFO [finetune.py:976] (0/7) Epoch 23, batch 3700, loss[loss=0.1973, simple_loss=0.2736, pruned_loss=0.06044, over 4809.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2446, pruned_loss=0.04967, over 955130.60 frames. ], batch size: 51, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:05:02,870 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6885, 1.7906, 0.7942, 1.3504, 1.7483, 1.5210, 1.4299, 1.4959],
+ device='cuda:0'), covar=tensor([0.0478, 0.0365, 0.0346, 0.0581, 0.0277, 0.0512, 0.0508, 0.0562],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051],
+ device='cuda:0')
+2023-04-27 19:05:03,364 INFO [finetune.py:976] (0/7) Epoch 23, batch 3750, loss[loss=0.1684, simple_loss=0.2444, pruned_loss=0.04621, over 4785.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2457, pruned_loss=0.05017, over 953096.27 frames. ], batch size: 29, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:05:04,808 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3599, 1.7592, 1.8408, 1.9296, 1.8007, 1.8313, 1.9114, 1.8312],
+ device='cuda:0'), covar=tensor([0.4004, 0.5119, 0.4048, 0.4038, 0.5116, 0.6692, 0.4639, 0.4363],
+ device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0372, 0.0326, 0.0338, 0.0346, 0.0393, 0.0356, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 19:05:12,867 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.590e+02 1.958e+02 2.395e+02 5.557e+02, threshold=3.915e+02, percent-clipped=3.0
+2023-04-27 19:05:15,281 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129769.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 19:05:23,576 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7234, 1.1273, 1.7525, 2.1989, 1.7812, 1.6443, 1.7402, 1.6315],
+ device='cuda:0'), covar=tensor([0.4310, 0.6311, 0.5339, 0.5266, 0.5291, 0.7248, 0.6791, 0.7972],
+ device='cuda:0'), in_proj_covar=tensor([0.0433, 0.0417, 0.0511, 0.0507, 0.0462, 0.0495, 0.0499, 0.0509],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 19:05:45,734 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=129791.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:06:08,153 INFO [finetune.py:976] (0/7) Epoch 23, batch 3800, loss[loss=0.1539, simple_loss=0.2356, pruned_loss=0.03612, over 4748.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.247, pruned_loss=0.05043, over 954356.54 frames.
], batch size: 28, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:06:19,775 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0610, 2.4821, 2.1468, 2.4359, 1.8536, 2.1828, 2.1460, 1.8545],
+ device='cuda:0'), covar=tensor([0.1908, 0.1131, 0.0766, 0.1146, 0.2838, 0.1098, 0.1678, 0.2199],
+ device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0298, 0.0212, 0.0273, 0.0311, 0.0253, 0.0245, 0.0260],
+ device='cuda:0'), out_proj_covar=tensor([1.1242e-04, 1.1788e-04, 8.3659e-05, 1.0764e-04, 1.2551e-04, 1.0003e-04,
+ 9.8816e-05, 1.0236e-04], device='cuda:0')
+2023-04-27 19:07:04,491 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=129852.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:07:13,929 INFO [finetune.py:976] (0/7) Epoch 23, batch 3850, loss[loss=0.1525, simple_loss=0.2312, pruned_loss=0.03686, over 4831.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2448, pruned_loss=0.04915, over 955043.06 frames. ], batch size: 47, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:07:14,750 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-04-27 19:07:24,865 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.155e+02 1.706e+02 1.886e+02 2.157e+02 5.437e+02, threshold=3.771e+02, percent-clipped=4.0
+2023-04-27 19:07:54,087 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0
+2023-04-27 19:08:15,872 INFO [finetune.py:976] (0/7) Epoch 23, batch 3900, loss[loss=0.159, simple_loss=0.2351, pruned_loss=0.04149, over 4842.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2411, pruned_loss=0.04805, over 955286.58 frames. ], batch size: 49, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:08:19,742 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0551, 1.8358, 1.9878, 2.5459, 2.4222, 1.9427, 1.6402, 2.1389],
+ device='cuda:0'), covar=tensor([0.0854, 0.1135, 0.0800, 0.0519, 0.0645, 0.0897, 0.0723, 0.0613],
+ device='cuda:0'), in_proj_covar=tensor([0.0188, 0.0204, 0.0186, 0.0174, 0.0178, 0.0182, 0.0151, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 19:09:17,305 INFO [finetune.py:976] (0/7) Epoch 23, batch 3950, loss[loss=0.155, simple_loss=0.22, pruned_loss=0.04495, over 4829.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2382, pruned_loss=0.04737, over 952993.90 frames. ], batch size: 30, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:09:27,940 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.540e+01 1.458e+02 1.749e+02 1.978e+02 4.242e+02, threshold=3.497e+02, percent-clipped=1.0
+2023-04-27 19:09:29,241 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=129968.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:10:14,781 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-130000.pt
+2023-04-27 19:10:16,069 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130000.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:10:22,143 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130002.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:10:33,622 INFO [finetune.py:976] (0/7) Epoch 23, batch 4000, loss[loss=0.1643, simple_loss=0.2538, pruned_loss=0.03742, over 4820.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2387, pruned_loss=0.0474, over 954104.59 frames.
], batch size: 39, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:11:35,003 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. limit=2.0
+2023-04-27 19:11:43,267 INFO [finetune.py:976] (0/7) Epoch 23, batch 4050, loss[loss=0.2133, simple_loss=0.2886, pruned_loss=0.06904, over 4930.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2421, pruned_loss=0.04862, over 955039.18 frames. ], batch size: 38, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:11:44,622 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130061.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:11:45,890 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130063.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 19:11:53,610 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.688e+02 1.937e+02 2.332e+02 4.574e+02, threshold=3.874e+02, percent-clipped=1.0
+2023-04-27 19:11:56,081 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130069.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 19:11:57,436 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0
+2023-04-27 19:12:28,839 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.28 vs. limit=5.0
+2023-04-27 19:12:52,033 INFO [finetune.py:976] (0/7) Epoch 23, batch 4100, loss[loss=0.1919, simple_loss=0.2617, pruned_loss=0.06109, over 4928.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2447, pruned_loss=0.04929, over 952944.18 frames. ], batch size: 33, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:13:01,818 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=130117.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 19:13:32,083 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130147.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:13:51,930 INFO [finetune.py:976] (0/7) Epoch 23, batch 4150, loss[loss=0.1788, simple_loss=0.2508, pruned_loss=0.05341, over 4817.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2461, pruned_loss=0.05, over 953477.87 frames. ], batch size: 30, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:14:02,699 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.725e+02 1.972e+02 2.342e+02 5.183e+02, threshold=3.944e+02, percent-clipped=4.0
+2023-04-27 19:14:40,203 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8746, 3.8623, 2.9741, 4.4848, 3.8932, 3.9033, 1.5708, 3.9214],
+ device='cuda:0'), covar=tensor([0.1794, 0.1215, 0.3065, 0.1496, 0.3152, 0.1751, 0.5796, 0.2266],
+ device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0220, 0.0255, 0.0308, 0.0300, 0.0248, 0.0277, 0.0276],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 19:14:48,093 INFO [finetune.py:976] (0/7) Epoch 23, batch 4200, loss[loss=0.1269, simple_loss=0.2058, pruned_loss=0.024, over 4780.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2455, pruned_loss=0.04916, over 952433.49 frames. ], batch size: 26, lr: 3.09e-03, grad_scale: 16.0
+2023-04-27 19:14:50,672 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs.
limit=2.0
+2023-04-27 19:15:10,233 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6980, 0.9899, 1.7088, 2.1452, 1.7880, 1.6351, 1.6829, 1.6479],
+ device='cuda:0'), covar=tensor([0.4429, 0.6913, 0.6063, 0.5925, 0.5863, 0.7482, 0.7994, 0.8392],
+ device='cuda:0'), in_proj_covar=tensor([0.0433, 0.0417, 0.0510, 0.0506, 0.0462, 0.0495, 0.0499, 0.0510],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 19:15:18,288 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3287, 1.3742, 1.4144, 1.5811, 1.6460, 1.3211, 0.9120, 1.4554],
+ device='cuda:0'), covar=tensor([0.0815, 0.1279, 0.0851, 0.0676, 0.0678, 0.0835, 0.0850, 0.0662],
+ device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0203, 0.0184, 0.0174, 0.0177, 0.0180, 0.0150, 0.0179],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 19:15:51,651 INFO [finetune.py:976] (0/7) Epoch 23, batch 4250, loss[loss=0.1735, simple_loss=0.2405, pruned_loss=0.05325, over 4813.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2431, pruned_loss=0.04851, over 954636.75 frames. ], batch size: 39, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:16:01,963 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.003e+02 1.509e+02 1.751e+02 2.292e+02 3.450e+02, threshold=3.503e+02, percent-clipped=0.0
+2023-04-27 19:16:03,335 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130268.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:16:53,663 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-04-27 19:16:55,318 INFO [finetune.py:976] (0/7) Epoch 23, batch 4300, loss[loss=0.194, simple_loss=0.2604, pruned_loss=0.06381, over 4862.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2394, pruned_loss=0.04744, over 954512.20 frames. ], batch size: 34, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:17:05,105 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=130316.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:17:39,012 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8609, 2.5326, 1.9603, 1.8888, 1.4166, 1.4089, 1.9258, 1.3478],
+ device='cuda:0'), covar=tensor([0.1610, 0.1273, 0.1292, 0.1636, 0.2185, 0.1910, 0.0975, 0.1952],
+ device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0212, 0.0169, 0.0206, 0.0201, 0.0186, 0.0156, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 19:17:58,667 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130356.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:17:59,937 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130358.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 19:18:05,662 INFO [finetune.py:976] (0/7) Epoch 23, batch 4350, loss[loss=0.1304, simple_loss=0.1927, pruned_loss=0.034, over 4768.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2369, pruned_loss=0.04674, over 955306.74 frames.
], batch size: 26, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:18:08,992 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5827, 1.6843, 0.8688, 1.3511, 1.8237, 1.4637, 1.3871, 1.4966],
+ device='cuda:0'), covar=tensor([0.0484, 0.0352, 0.0331, 0.0534, 0.0271, 0.0484, 0.0474, 0.0534],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051],
+ device='cuda:0')
+2023-04-27 19:18:10,121 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.083e+02 1.587e+02 1.809e+02 2.203e+02 4.398e+02, threshold=3.619e+02, percent-clipped=3.0
+2023-04-27 19:18:11,020 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0
+2023-04-27 19:18:29,049 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130384.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:19:02,657 INFO [finetune.py:976] (0/7) Epoch 23, batch 4400, loss[loss=0.1707, simple_loss=0.2379, pruned_loss=0.05172, over 4809.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2385, pruned_loss=0.0475, over 952413.04 frames. ], batch size: 29, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:19:34,752 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130432.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:19:45,120 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130438.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:19:55,328 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130445.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:19:56,565 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130447.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:20:16,252 INFO [finetune.py:976] (0/7) Epoch 23, batch 4450, loss[loss=0.1671, simple_loss=0.2405, pruned_loss=0.04684, over 4825.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2418, pruned_loss=0.04877, over 954059.20 frames.
], batch size: 40, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:20:25,830 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.347e+01 1.587e+02 1.803e+02 2.328e+02 4.838e+02, threshold=3.606e+02, percent-clipped=2.0
+2023-04-27 19:20:28,523 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6866, 3.5789, 1.2548, 1.8436, 2.1599, 2.6245, 2.0719, 1.1682],
+ device='cuda:0'), covar=tensor([0.1351, 0.0965, 0.1847, 0.1365, 0.1028, 0.1038, 0.1602, 0.1813],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0240, 0.0138, 0.0121, 0.0133, 0.0152, 0.0118, 0.0120],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 19:20:36,469 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4591, 2.3797, 2.6268, 3.0906, 3.0236, 2.3831, 2.0811, 2.7511],
+ device='cuda:0'), covar=tensor([0.0899, 0.0939, 0.0627, 0.0510, 0.0567, 0.0843, 0.0693, 0.0510],
+ device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0200, 0.0181, 0.0171, 0.0174, 0.0178, 0.0147, 0.0176],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 19:21:01,279 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130493.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:21:02,482 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=130495.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:21:11,827 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130499.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:21:23,654 INFO [finetune.py:976] (0/7) Epoch 23, batch 4500, loss[loss=0.1934, simple_loss=0.2701, pruned_loss=0.05834, over 4897.00 frames. ], tot_loss[loss=0.1726, simple_loss=0.245, pruned_loss=0.05009, over 953983.54 frames. ], batch size: 37, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:22:09,602 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6541, 2.0927, 2.0714, 2.2605, 2.3501, 2.3285, 1.7830, 4.8130],
+ device='cuda:0'), covar=tensor([0.0475, 0.0703, 0.0664, 0.1071, 0.0516, 0.0437, 0.0676, 0.0091],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 19:22:17,948 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130548.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:22:31,479 INFO [finetune.py:976] (0/7) Epoch 23, batch 4550, loss[loss=0.1482, simple_loss=0.218, pruned_loss=0.03924, over 4713.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2453, pruned_loss=0.04986, over 952996.79 frames.
], batch size: 23, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:22:31,551 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9203, 2.8805, 2.1277, 3.3157, 2.9213, 2.9207, 1.1823, 2.7953],
+ device='cuda:0'), covar=tensor([0.2254, 0.1681, 0.3493, 0.3051, 0.2935, 0.2078, 0.5825, 0.3127],
+ device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0221, 0.0256, 0.0309, 0.0300, 0.0250, 0.0278, 0.0277],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 19:22:42,281 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=130565.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:22:42,754 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.159e+02 1.614e+02 1.836e+02 2.049e+02 5.292e+02, threshold=3.672e+02, percent-clipped=1.0
+2023-04-27 19:23:36,954 INFO [finetune.py:976] (0/7) Epoch 23, batch 4600, loss[loss=0.1502, simple_loss=0.2211, pruned_loss=0.03963, over 4759.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.2445, pruned_loss=0.04934, over 951114.59 frames. ], batch size: 54, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:23:37,083 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130609.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 19:23:56,069 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=130626.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:24:36,988 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130656.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:24:38,223 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=130658.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 19:24:39,268 INFO [finetune.py:976] (0/7) Epoch 23, batch 4650, loss[loss=0.1862, simple_loss=0.2489, pruned_loss=0.06171, over 4757.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2428, pruned_loss=0.04912, over 953977.18 frames. ], batch size: 27, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:24:47,186 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5130, 1.7320, 1.6806, 2.1253, 2.0426, 2.2119, 1.4913, 4.5407],
+ device='cuda:0'), covar=tensor([0.0520, 0.0795, 0.0787, 0.1139, 0.0603, 0.0475, 0.0718, 0.0101],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 19:24:48,333 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.298e+01 1.478e+02 1.669e+02 2.060e+02 3.844e+02, threshold=3.337e+02, percent-clipped=2.0
+2023-04-27 19:25:38,854 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0
+2023-04-27 19:25:39,294 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=130704.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:25:40,552 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=130706.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:25:47,205 INFO [finetune.py:976] (0/7) Epoch 23, batch 4700, loss[loss=0.1588, simple_loss=0.2331, pruned_loss=0.04226, over 4688.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2397, pruned_loss=0.04834, over 952324.62 frames.
], batch size: 23, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:26:22,690 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130740.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:26:50,257 INFO [finetune.py:976] (0/7) Epoch 23, batch 4750, loss[loss=0.2081, simple_loss=0.2666, pruned_loss=0.07479, over 4940.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.237, pruned_loss=0.0476, over 952025.15 frames. ], batch size: 38, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:27:00,091 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.517e+02 1.838e+02 2.137e+02 3.767e+02, threshold=3.677e+02, percent-clipped=3.0
+2023-04-27 19:27:21,659 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0
+2023-04-27 19:27:24,058 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130788.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:27:33,669 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130794.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:27:54,321 INFO [finetune.py:976] (0/7) Epoch 23, batch 4800, loss[loss=0.2011, simple_loss=0.282, pruned_loss=0.0601, over 4832.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2398, pruned_loss=0.04843, over 952611.51 frames. ], batch size: 47, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:28:54,603 INFO [finetune.py:976] (0/7) Epoch 23, batch 4850, loss[loss=0.1655, simple_loss=0.2507, pruned_loss=0.04021, over 4870.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2435, pruned_loss=0.04949, over 953067.23 frames. ], batch size: 34, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:28:54,754 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9609, 1.2245, 1.6467, 1.7410, 1.7244, 1.7713, 1.6812, 1.6764],
+ device='cuda:0'), covar=tensor([0.4062, 0.4736, 0.4156, 0.4297, 0.5089, 0.6817, 0.4428, 0.4158],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0376, 0.0328, 0.0340, 0.0350, 0.0396, 0.0359, 0.0331],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 19:29:05,298 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.564e+02 1.952e+02 2.266e+02 5.605e+02, threshold=3.905e+02, percent-clipped=3.0
+2023-04-27 19:29:51,160 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2489, 1.5074, 1.7658, 1.8719, 1.7776, 1.8524, 1.7574, 1.7864],
+ device='cuda:0'), covar=tensor([0.3975, 0.5043, 0.4566, 0.4343, 0.5493, 0.6717, 0.5324, 0.4897],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0376, 0.0328, 0.0340, 0.0350, 0.0396, 0.0359, 0.0331],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 19:30:00,387 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130904.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 19:30:03,973 INFO [finetune.py:976] (0/7) Epoch 23, batch 4900, loss[loss=0.1912, simple_loss=0.242, pruned_loss=0.0702, over 4742.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2448, pruned_loss=0.04967, over 954890.79 frames.
], batch size: 23, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:30:23,205 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=130921.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:30:59,048 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9074, 1.4044, 1.7285, 1.7957, 1.6858, 1.4398, 0.8312, 1.4039],
+ device='cuda:0'), covar=tensor([0.3117, 0.3101, 0.1636, 0.1897, 0.2396, 0.2482, 0.4072, 0.1927],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0244, 0.0227, 0.0312, 0.0219, 0.0233, 0.0225, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 19:31:11,883 INFO [finetune.py:976] (0/7) Epoch 23, batch 4950, loss[loss=0.1382, simple_loss=0.2135, pruned_loss=0.03139, over 4783.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2461, pruned_loss=0.04979, over 955033.00 frames. ], batch size: 26, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:31:20,282 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-04-27 19:31:21,966 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.168e+02 1.568e+02 1.814e+02 2.171e+02 6.365e+02, threshold=3.628e+02, percent-clipped=1.0
+2023-04-27 19:31:53,026 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4112, 1.5722, 1.3883, 1.6007, 1.3503, 1.3666, 1.3830, 1.0800],
+ device='cuda:0'), covar=tensor([0.1522, 0.1195, 0.0892, 0.0980, 0.3365, 0.1070, 0.1594, 0.2084],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0302, 0.0215, 0.0275, 0.0316, 0.0256, 0.0249, 0.0262],
+ device='cuda:0'), out_proj_covar=tensor([1.1395e-04, 1.1940e-04, 8.4514e-05, 1.0840e-04, 1.2771e-04, 1.0126e-04,
+ 1.0043e-04, 1.0358e-04], device='cuda:0')
+2023-04-27 19:32:02,716 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7721, 2.5093, 2.0192, 2.3059, 2.4874, 2.0506, 2.9742, 1.7544],
+ device='cuda:0'), covar=tensor([0.3253, 0.1935, 0.4215, 0.2944, 0.1748, 0.2505, 0.1839, 0.4356],
+ device='cuda:0'), in_proj_covar=tensor([0.0335, 0.0346, 0.0423, 0.0348, 0.0375, 0.0372, 0.0367, 0.0417],
+ device='cuda:0'), out_proj_covar=tensor([9.9399e-05, 1.0335e-04, 1.2829e-04, 1.0476e-04, 1.1129e-04, 1.1096e-04,
+ 1.0777e-04, 1.2549e-04], device='cuda:0')
+2023-04-27 19:32:26,076 INFO [finetune.py:976] (0/7) Epoch 23, batch 5000, loss[loss=0.1231, simple_loss=0.1897, pruned_loss=0.02823, over 4826.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2444, pruned_loss=0.04908, over 955318.41 frames. ], batch size: 49, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:32:26,338 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.45 vs. limit=5.0
+2023-04-27 19:33:10,248 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131040.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:33:35,486 INFO [finetune.py:976] (0/7) Epoch 23, batch 5050, loss[loss=0.1481, simple_loss=0.2237, pruned_loss=0.03622, over 4823.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2426, pruned_loss=0.04923, over 955243.63 frames.
], batch size: 25, lr: 3.08e-03, grad_scale: 16.0
+2023-04-27 19:33:41,158 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.462e+02 1.838e+02 2.219e+02 3.974e+02, threshold=3.675e+02, percent-clipped=2.0
+2023-04-27 19:34:08,382 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=131088.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:34:08,409 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131088.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:34:18,270 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131094.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:34:18,313 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7665, 2.9068, 2.4914, 2.6603, 3.0475, 2.4677, 3.9349, 2.3074],
+ device='cuda:0'), covar=tensor([0.3584, 0.1993, 0.3717, 0.2969, 0.1583, 0.2730, 0.1489, 0.3597],
+ device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0350, 0.0427, 0.0352, 0.0378, 0.0376, 0.0370, 0.0420],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 19:34:38,867 INFO [finetune.py:976] (0/7) Epoch 23, batch 5100, loss[loss=0.1285, simple_loss=0.2082, pruned_loss=0.02439, over 4783.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2386, pruned_loss=0.04778, over 956109.91 frames. ], batch size: 29, lr: 3.08e-03, grad_scale: 32.0
+2023-04-27 19:34:40,181 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9475, 1.6087, 1.4869, 1.7165, 2.1246, 1.7016, 1.4646, 1.4089],
+ device='cuda:0'), covar=tensor([0.1589, 0.1751, 0.1954, 0.1421, 0.0907, 0.1806, 0.2184, 0.2564],
+ device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0308, 0.0351, 0.0284, 0.0326, 0.0306, 0.0299, 0.0372],
+ device='cuda:0'), out_proj_covar=tensor([6.4248e-05, 6.3704e-05, 7.3874e-05, 5.7032e-05, 6.7091e-05, 6.4031e-05,
+ 6.2401e-05, 7.8921e-05], device='cuda:0')
+2023-04-27 19:34:51,129 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-27 19:35:11,741 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=131136.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:35:21,307 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=131142.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:35:41,499 INFO [finetune.py:976] (0/7) Epoch 23, batch 5150, loss[loss=0.2197, simple_loss=0.269, pruned_loss=0.08524, over 4817.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2386, pruned_loss=0.04795, over 952675.57 frames.
], batch size: 38, lr: 3.08e-03, grad_scale: 32.0
+2023-04-27 19:35:51,236 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.590e+02 1.900e+02 2.291e+02 6.669e+02, threshold=3.800e+02, percent-clipped=5.0
+2023-04-27 19:36:02,277 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131174.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:36:11,469 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4559, 1.5729, 1.5186, 1.7230, 1.6946, 1.9125, 1.4282, 3.4618],
+ device='cuda:0'), covar=tensor([0.0579, 0.0807, 0.0776, 0.1225, 0.0647, 0.0509, 0.0738, 0.0159],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 19:36:32,800 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5862, 3.0027, 0.9751, 1.7989, 2.4950, 1.6769, 4.2617, 2.1078],
+ device='cuda:0'), covar=tensor([0.0600, 0.0879, 0.0924, 0.1248, 0.0500, 0.0932, 0.0279, 0.0600],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0050, 0.0072, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 19:36:42,352 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131204.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 19:36:45,362 INFO [finetune.py:976] (0/7) Epoch 23, batch 5200, loss[loss=0.1251, simple_loss=0.1886, pruned_loss=0.03076, over 4174.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2427, pruned_loss=0.04919, over 954469.00 frames. ], batch size: 18, lr: 3.08e-03, grad_scale: 32.0
+2023-04-27 19:37:02,202 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-04-27 19:37:04,419 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131221.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:37:22,653 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131235.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 19:37:43,642 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=131252.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:37:52,693 INFO [finetune.py:976] (0/7) Epoch 23, batch 5250, loss[loss=0.1586, simple_loss=0.2346, pruned_loss=0.04131, over 4873.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2449, pruned_loss=0.04949, over 956574.34 frames. ], batch size: 31, lr: 3.08e-03, grad_scale: 32.0
+2023-04-27 19:37:57,551 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.170e+02 1.647e+02 1.953e+02 2.357e+02 4.289e+02, threshold=3.906e+02, percent-clipped=1.0
+2023-04-27 19:38:04,768 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=131269.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 19:38:50,170 INFO [finetune.py:976] (0/7) Epoch 23, batch 5300, loss[loss=0.1839, simple_loss=0.2548, pruned_loss=0.05654, over 4817.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2458, pruned_loss=0.05001, over 955461.31 frames.
], batch size: 40, lr: 3.08e-03, grad_scale: 32.0 +2023-04-27 19:39:44,605 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8266, 1.8432, 1.7292, 1.5339, 1.8689, 1.5993, 2.3000, 1.5483], + device='cuda:0'), covar=tensor([0.3489, 0.1724, 0.4988, 0.2681, 0.1550, 0.2243, 0.1451, 0.4694], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0348, 0.0425, 0.0351, 0.0377, 0.0374, 0.0368, 0.0418], + device='cuda:0'), out_proj_covar=tensor([9.9936e-05, 1.0379e-04, 1.2891e-04, 1.0543e-04, 1.1196e-04, 1.1143e-04, + 1.0799e-04, 1.2565e-04], device='cuda:0') +2023-04-27 19:39:56,069 INFO [finetune.py:976] (0/7) Epoch 23, batch 5350, loss[loss=0.12, simple_loss=0.1917, pruned_loss=0.02415, over 4015.00 frames. ], tot_loss[loss=0.1731, simple_loss=0.2465, pruned_loss=0.04991, over 957228.98 frames. ], batch size: 17, lr: 3.08e-03, grad_scale: 32.0 +2023-04-27 19:40:05,427 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2391, 1.4814, 1.2271, 1.4255, 1.1845, 1.1564, 1.2172, 1.0082], + device='cuda:0'), covar=tensor([0.1836, 0.1591, 0.1200, 0.1503, 0.3784, 0.1512, 0.1812, 0.2140], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0298, 0.0213, 0.0273, 0.0312, 0.0254, 0.0246, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1273e-04, 1.1780e-04, 8.3955e-05, 1.0770e-04, 1.2615e-04, 1.0038e-04, + 9.9100e-05, 1.0264e-04], device='cuda:0') +2023-04-27 19:40:07,006 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.812e+01 1.537e+02 1.790e+02 2.239e+02 4.084e+02, threshold=3.580e+02, percent-clipped=2.0 +2023-04-27 19:40:52,093 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.47 vs. limit=2.0 +2023-04-27 19:41:03,583 INFO [finetune.py:976] (0/7) Epoch 23, batch 5400, loss[loss=0.1732, simple_loss=0.2328, pruned_loss=0.05682, over 4762.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2436, pruned_loss=0.04936, over 957191.21 frames. ], batch size: 26, lr: 3.08e-03, grad_scale: 32.0 +2023-04-27 19:41:03,701 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131409.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:41:12,761 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131416.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:42:12,940 INFO [finetune.py:976] (0/7) Epoch 23, batch 5450, loss[loss=0.215, simple_loss=0.2753, pruned_loss=0.07734, over 4821.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2413, pruned_loss=0.04895, over 954876.86 frames. ], batch size: 40, lr: 3.08e-03, grad_scale: 32.0 +2023-04-27 19:42:22,593 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.036e+02 1.575e+02 1.987e+02 2.572e+02 5.206e+02, threshold=3.973e+02, percent-clipped=5.0 +2023-04-27 19:42:31,922 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131470.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:42:43,222 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131477.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:42:58,641 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131492.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:43:22,050 INFO [finetune.py:976] (0/7) Epoch 23, batch 5500, loss[loss=0.1595, simple_loss=0.2342, pruned_loss=0.04237, over 4813.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2382, pruned_loss=0.04777, over 955144.69 frames. 
], batch size: 41, lr: 3.08e-03, grad_scale: 32.0 +2023-04-27 19:43:29,155 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1825, 1.5179, 1.4099, 1.7519, 1.6714, 1.9676, 1.2950, 3.4291], + device='cuda:0'), covar=tensor([0.0633, 0.0792, 0.0757, 0.1135, 0.0613, 0.0562, 0.0758, 0.0154], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 19:43:52,905 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131530.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 19:44:16,290 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131553.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:44:21,726 INFO [finetune.py:976] (0/7) Epoch 23, batch 5550, loss[loss=0.1819, simple_loss=0.2585, pruned_loss=0.05263, over 4919.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2397, pruned_loss=0.04809, over 953917.71 frames. ], batch size: 33, lr: 3.08e-03, grad_scale: 32.0 +2023-04-27 19:44:32,382 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.589e+02 1.849e+02 2.302e+02 7.246e+02, threshold=3.698e+02, percent-clipped=1.0 +2023-04-27 19:44:36,005 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5874, 0.7169, 1.4988, 1.9555, 1.6642, 1.4940, 1.5389, 1.5054], + device='cuda:0'), covar=tensor([0.3611, 0.5711, 0.4993, 0.4863, 0.4689, 0.6391, 0.6130, 0.7455], + device='cuda:0'), in_proj_covar=tensor([0.0434, 0.0418, 0.0512, 0.0507, 0.0463, 0.0496, 0.0499, 0.0512], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 19:45:24,200 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.35 vs. limit=2.0 +2023-04-27 19:45:32,465 INFO [finetune.py:976] (0/7) Epoch 23, batch 5600, loss[loss=0.1691, simple_loss=0.2441, pruned_loss=0.04708, over 4820.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2429, pruned_loss=0.04909, over 950524.69 frames. ], batch size: 25, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:46:28,599 INFO [finetune.py:976] (0/7) Epoch 23, batch 5650, loss[loss=0.2, simple_loss=0.2731, pruned_loss=0.06343, over 4844.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2447, pruned_loss=0.04939, over 951096.88 frames. ], batch size: 47, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:46:38,007 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.000e+01 1.549e+02 1.818e+02 2.173e+02 4.178e+02, threshold=3.635e+02, percent-clipped=2.0 +2023-04-27 19:46:46,484 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-27 19:47:00,404 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7234, 2.1349, 1.8168, 2.0530, 1.6238, 1.8399, 1.6507, 1.5055], + device='cuda:0'), covar=tensor([0.1837, 0.1244, 0.0868, 0.1062, 0.3406, 0.1185, 0.1910, 0.2337], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0298, 0.0213, 0.0273, 0.0312, 0.0255, 0.0246, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1323e-04, 1.1791e-04, 8.3876e-05, 1.0773e-04, 1.2602e-04, 1.0051e-04, + 9.9365e-05, 1.0271e-04], device='cuda:0') +2023-04-27 19:47:29,410 INFO [finetune.py:976] (0/7) Epoch 23, batch 5700, loss[loss=0.1547, simple_loss=0.2129, pruned_loss=0.04823, over 4316.00 frames. 
], tot_loss[loss=0.1702, simple_loss=0.2422, pruned_loss=0.04912, over 935524.22 frames. ], batch size: 19, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:48:02,269 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-23.pt +2023-04-27 19:48:21,856 INFO [finetune.py:976] (0/7) Epoch 24, batch 0, loss[loss=0.1253, simple_loss=0.1897, pruned_loss=0.03044, over 4524.00 frames. ], tot_loss[loss=0.1253, simple_loss=0.1897, pruned_loss=0.03044, over 4524.00 frames. ], batch size: 19, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:48:21,857 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 19:48:24,681 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3713, 1.5363, 1.9320, 2.0425, 1.9921, 2.0980, 1.9356, 2.0113], + device='cuda:0'), covar=tensor([0.3826, 0.5109, 0.4511, 0.4589, 0.5097, 0.7016, 0.5175, 0.4382], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0375, 0.0328, 0.0339, 0.0349, 0.0394, 0.0357, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 19:48:36,753 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2094, 1.5320, 1.7103, 1.8561, 1.8340, 1.8811, 1.7373, 1.7854], + device='cuda:0'), covar=tensor([0.4089, 0.5139, 0.4659, 0.4233, 0.5277, 0.6460, 0.5345, 0.4805], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0375, 0.0328, 0.0339, 0.0349, 0.0394, 0.0357, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 19:48:37,426 INFO [finetune.py:1010] (0/7) Epoch 24, validation: loss=0.1552, simple_loss=0.2243, pruned_loss=0.04308, over 2265189.00 frames. +2023-04-27 19:48:37,427 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 19:49:12,817 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131765.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:49:13,311 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.101e+01 1.500e+02 1.790e+02 2.328e+02 5.462e+02, threshold=3.579e+02, percent-clipped=4.0 +2023-04-27 19:49:21,978 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131772.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:49:37,727 INFO [finetune.py:976] (0/7) Epoch 24, batch 50, loss[loss=0.1537, simple_loss=0.2266, pruned_loss=0.04045, over 4753.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2446, pruned_loss=0.04771, over 215222.07 frames. ], batch size: 28, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:50:39,016 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=131830.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:50:48,615 INFO [finetune.py:976] (0/7) Epoch 24, batch 100, loss[loss=0.1175, simple_loss=0.1716, pruned_loss=0.03173, over 4300.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2368, pruned_loss=0.046, over 379386.97 frames. 
], batch size: 18, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:50:56,376 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=131848.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:50:58,848 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3334, 2.1441, 2.4741, 2.9847, 2.8763, 2.3117, 2.1317, 2.5200], + device='cuda:0'), covar=tensor([0.0867, 0.1133, 0.0687, 0.0547, 0.0598, 0.0875, 0.0699, 0.0530], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0201, 0.0182, 0.0171, 0.0174, 0.0177, 0.0147, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 19:51:01,827 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=131857.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:51:07,128 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.089e+01 1.524e+02 1.795e+02 2.090e+02 4.072e+02, threshold=3.590e+02, percent-clipped=1.0 +2023-04-27 19:51:23,455 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=131878.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:51:35,579 INFO [finetune.py:976] (0/7) Epoch 24, batch 150, loss[loss=0.163, simple_loss=0.2254, pruned_loss=0.0503, over 4813.00 frames. ], tot_loss[loss=0.163, simple_loss=0.234, pruned_loss=0.04595, over 505649.53 frames. ], batch size: 38, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:52:18,767 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=131918.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:52:44,207 INFO [finetune.py:976] (0/7) Epoch 24, batch 200, loss[loss=0.1714, simple_loss=0.2465, pruned_loss=0.04819, over 4103.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2344, pruned_loss=0.04686, over 604019.03 frames. ], batch size: 65, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:53:12,910 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.534e+02 1.797e+02 2.198e+02 4.905e+02, threshold=3.594e+02, percent-clipped=2.0 +2023-04-27 19:53:27,657 INFO [finetune.py:976] (0/7) Epoch 24, batch 250, loss[loss=0.1728, simple_loss=0.2372, pruned_loss=0.05418, over 4739.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2378, pruned_loss=0.04768, over 681897.80 frames. ], batch size: 23, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:53:36,555 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-132000.pt +2023-04-27 19:53:57,460 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132031.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:54:01,449 INFO [finetune.py:976] (0/7) Epoch 24, batch 300, loss[loss=0.1609, simple_loss=0.2181, pruned_loss=0.05191, over 4723.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2413, pruned_loss=0.04877, over 740317.23 frames. ], batch size: 23, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:54:17,440 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.40 vs. 
limit=5.0 +2023-04-27 19:54:24,622 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132054.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:54:36,451 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132065.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:54:36,978 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.704e+02 1.925e+02 2.353e+02 6.924e+02, threshold=3.849e+02, percent-clipped=2.0 +2023-04-27 19:54:46,333 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132072.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:54:51,128 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5156, 1.3934, 0.4943, 1.2531, 1.4087, 1.3715, 1.2977, 1.3265], + device='cuda:0'), covar=tensor([0.0503, 0.0380, 0.0386, 0.0552, 0.0292, 0.0503, 0.0474, 0.0584], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0051], + device='cuda:0') +2023-04-27 19:55:00,436 INFO [finetune.py:976] (0/7) Epoch 24, batch 350, loss[loss=0.151, simple_loss=0.222, pruned_loss=0.03999, over 4095.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2441, pruned_loss=0.04971, over 788221.17 frames. ], batch size: 17, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:55:10,369 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132092.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:55:40,631 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=132113.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:55:41,934 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132115.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 19:55:44,962 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=132120.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:55:44,999 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3073, 2.8525, 1.0600, 1.6354, 2.1194, 1.2997, 3.6926, 1.6579], + device='cuda:0'), covar=tensor([0.0611, 0.0770, 0.0875, 0.1175, 0.0506, 0.0985, 0.0216, 0.0634], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0063, 0.0047, 0.0046, 0.0049, 0.0050, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 19:56:06,856 INFO [finetune.py:976] (0/7) Epoch 24, batch 400, loss[loss=0.181, simple_loss=0.2447, pruned_loss=0.05867, over 4835.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2447, pruned_loss=0.04976, over 825356.29 frames. ], batch size: 30, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:56:25,726 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132148.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:56:49,862 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.121e+02 1.575e+02 1.808e+02 2.256e+02 3.614e+02, threshold=3.616e+02, percent-clipped=0.0 +2023-04-27 19:57:08,716 INFO [finetune.py:976] (0/7) Epoch 24, batch 450, loss[loss=0.1678, simple_loss=0.2377, pruned_loss=0.04899, over 4812.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2438, pruned_loss=0.0493, over 853389.70 frames. 
], batch size: 38, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:57:12,344 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5541, 2.5471, 2.0020, 2.2087, 2.4253, 2.0765, 3.1457, 1.8995], + device='cuda:0'), covar=tensor([0.3550, 0.1901, 0.4095, 0.3360, 0.1987, 0.2663, 0.1812, 0.4097], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0353, 0.0428, 0.0354, 0.0380, 0.0378, 0.0370, 0.0423], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 19:57:15,197 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=132196.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:57:27,640 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132213.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:57:31,908 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6814, 1.4654, 1.8976, 1.9892, 1.4810, 1.3863, 1.5730, 1.0652], + device='cuda:0'), covar=tensor([0.0462, 0.0670, 0.0327, 0.0450, 0.0692, 0.1056, 0.0514, 0.0524], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0067, 0.0066, 0.0067, 0.0074, 0.0094, 0.0072, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 19:57:42,206 INFO [finetune.py:976] (0/7) Epoch 24, batch 500, loss[loss=0.1436, simple_loss=0.2166, pruned_loss=0.03527, over 4821.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2403, pruned_loss=0.04778, over 876357.63 frames. ], batch size: 51, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:58:03,238 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.544e+02 1.768e+02 2.170e+02 4.737e+02, threshold=3.537e+02, percent-clipped=2.0 +2023-04-27 19:58:16,036 INFO [finetune.py:976] (0/7) Epoch 24, batch 550, loss[loss=0.1612, simple_loss=0.2347, pruned_loss=0.04392, over 4829.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2368, pruned_loss=0.04679, over 895997.55 frames. ], batch size: 33, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:58:47,054 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-27 19:58:49,839 INFO [finetune.py:976] (0/7) Epoch 24, batch 600, loss[loss=0.1704, simple_loss=0.2386, pruned_loss=0.05109, over 4719.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2372, pruned_loss=0.04684, over 910757.53 frames. ], batch size: 23, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:59:10,244 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.689e+02 1.942e+02 2.477e+02 4.504e+02, threshold=3.885e+02, percent-clipped=3.0 +2023-04-27 19:59:22,996 INFO [finetune.py:976] (0/7) Epoch 24, batch 650, loss[loss=0.169, simple_loss=0.2463, pruned_loss=0.04585, over 4824.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2413, pruned_loss=0.0486, over 920860.54 frames. 
], batch size: 45, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 19:59:23,070 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132387.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 19:59:39,122 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=132410.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 19:59:55,062 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2052, 1.6873, 2.0829, 2.5252, 2.1980, 1.6520, 1.3431, 1.9574], + device='cuda:0'), covar=tensor([0.3155, 0.3114, 0.1684, 0.2345, 0.2404, 0.2658, 0.4257, 0.1869], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0245, 0.0227, 0.0313, 0.0221, 0.0235, 0.0227, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 20:00:05,784 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.84 vs. limit=5.0 +2023-04-27 20:00:08,101 INFO [finetune.py:976] (0/7) Epoch 24, batch 700, loss[loss=0.152, simple_loss=0.2206, pruned_loss=0.04166, over 4751.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2421, pruned_loss=0.04931, over 927370.10 frames. ], batch size: 26, lr: 3.07e-03, grad_scale: 32.0 +2023-04-27 20:00:50,348 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.633e+02 1.958e+02 2.360e+02 4.192e+02, threshold=3.915e+02, percent-clipped=4.0 +2023-04-27 20:01:15,830 INFO [finetune.py:976] (0/7) Epoch 24, batch 750, loss[loss=0.1684, simple_loss=0.2488, pruned_loss=0.04405, over 4811.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2437, pruned_loss=0.04959, over 930908.13 frames. ], batch size: 38, lr: 3.06e-03, grad_scale: 32.0 +2023-04-27 20:01:24,664 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-27 20:01:48,563 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132513.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:02:21,501 INFO [finetune.py:976] (0/7) Epoch 24, batch 800, loss[loss=0.1809, simple_loss=0.2464, pruned_loss=0.05764, over 4856.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2432, pruned_loss=0.04907, over 933921.35 frames. ], batch size: 31, lr: 3.06e-03, grad_scale: 32.0 +2023-04-27 20:02:53,589 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=132561.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:03:01,811 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.951e+01 1.478e+02 1.835e+02 2.240e+02 6.510e+02, threshold=3.671e+02, percent-clipped=1.0 +2023-04-27 20:03:27,811 INFO [finetune.py:976] (0/7) Epoch 24, batch 850, loss[loss=0.1805, simple_loss=0.2536, pruned_loss=0.05366, over 4902.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2419, pruned_loss=0.04894, over 938518.22 frames. 
], batch size: 37, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:03:35,175 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2372, 1.2778, 5.3277, 4.9929, 4.6034, 5.0035, 4.7605, 4.7520], + device='cuda:0'), covar=tensor([0.7089, 0.6835, 0.1102, 0.2046, 0.1173, 0.1186, 0.1121, 0.2001], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0308, 0.0407, 0.0409, 0.0347, 0.0410, 0.0316, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 20:04:18,525 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2484, 1.1865, 3.8418, 3.5544, 3.3997, 3.7048, 3.7217, 3.3442], + device='cuda:0'), covar=tensor([0.7722, 0.6043, 0.1204, 0.1976, 0.1331, 0.1529, 0.1438, 0.1708], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0308, 0.0407, 0.0409, 0.0347, 0.0410, 0.0315, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 20:04:34,506 INFO [finetune.py:976] (0/7) Epoch 24, batch 900, loss[loss=0.1444, simple_loss=0.2115, pruned_loss=0.03863, over 4836.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2388, pruned_loss=0.04831, over 939398.65 frames. ], batch size: 38, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:04:43,286 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-27 20:05:02,120 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132660.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:05:12,901 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.989e+01 1.487e+02 1.792e+02 2.042e+02 3.425e+02, threshold=3.585e+02, percent-clipped=0.0 +2023-04-27 20:05:26,134 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-04-27 20:05:44,311 INFO [finetune.py:976] (0/7) Epoch 24, batch 950, loss[loss=0.2282, simple_loss=0.2906, pruned_loss=0.08289, over 4734.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.237, pruned_loss=0.04746, over 943561.32 frames. ], batch size: 59, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:05:44,395 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132687.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:06:08,252 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=132710.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 20:06:27,920 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132721.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:06:43,233 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=132735.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:06:49,486 INFO [finetune.py:976] (0/7) Epoch 24, batch 1000, loss[loss=0.2061, simple_loss=0.2772, pruned_loss=0.06745, over 4902.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2392, pruned_loss=0.04882, over 944355.38 frames. 
], batch size: 37, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:06:55,170 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=132746.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:07:02,343 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=132758.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:07:02,373 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3053, 3.1831, 1.0820, 1.6741, 1.7256, 2.3508, 1.7921, 0.9588], + device='cuda:0'), covar=tensor([0.1422, 0.0857, 0.1817, 0.1284, 0.1123, 0.0958, 0.1463, 0.2003], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0239, 0.0137, 0.0120, 0.0133, 0.0152, 0.0116, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 20:07:07,765 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.549e+02 1.832e+02 2.153e+02 4.117e+02, threshold=3.664e+02, percent-clipped=1.0 +2023-04-27 20:07:22,248 INFO [finetune.py:976] (0/7) Epoch 24, batch 1050, loss[loss=0.2006, simple_loss=0.2598, pruned_loss=0.07069, over 4919.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2417, pruned_loss=0.04894, over 947846.59 frames. ], batch size: 33, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:07:35,145 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=132807.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:07:49,842 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6129, 1.3079, 4.0842, 3.8432, 3.5732, 3.8032, 3.7265, 3.5977], + device='cuda:0'), covar=tensor([0.6836, 0.5601, 0.1024, 0.1543, 0.1150, 0.1638, 0.2297, 0.1401], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0306, 0.0404, 0.0405, 0.0346, 0.0408, 0.0314, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 20:07:56,053 INFO [finetune.py:976] (0/7) Epoch 24, batch 1100, loss[loss=0.1436, simple_loss=0.2158, pruned_loss=0.03572, over 4771.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2444, pruned_loss=0.05005, over 951801.39 frames. ], batch size: 28, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:08:14,647 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.617e+01 1.589e+02 1.846e+02 2.225e+02 4.028e+02, threshold=3.692e+02, percent-clipped=2.0 +2023-04-27 20:08:28,715 INFO [finetune.py:976] (0/7) Epoch 24, batch 1150, loss[loss=0.1771, simple_loss=0.2549, pruned_loss=0.0497, over 4835.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2445, pruned_loss=0.04986, over 951892.78 frames. ], batch size: 49, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:08:32,531 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-27 20:09:11,141 INFO [finetune.py:976] (0/7) Epoch 24, batch 1200, loss[loss=0.1617, simple_loss=0.2271, pruned_loss=0.04815, over 4749.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2438, pruned_loss=0.04949, over 953351.77 frames. ], batch size: 27, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:09:48,368 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.568e+02 1.953e+02 2.291e+02 3.731e+02, threshold=3.906e+02, percent-clipped=1.0 +2023-04-27 20:10:17,850 INFO [finetune.py:976] (0/7) Epoch 24, batch 1250, loss[loss=0.1747, simple_loss=0.2494, pruned_loss=0.05001, over 4867.00 frames. 
], tot_loss[loss=0.1695, simple_loss=0.2416, pruned_loss=0.04866, over 954140.09 frames. ], batch size: 31, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:10:29,345 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0840, 2.5116, 0.9572, 1.5322, 1.5731, 1.9562, 1.6432, 0.8705], + device='cuda:0'), covar=tensor([0.1391, 0.1160, 0.1628, 0.1212, 0.1026, 0.0815, 0.1342, 0.1658], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0241, 0.0138, 0.0122, 0.0134, 0.0153, 0.0118, 0.0120], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 20:10:53,171 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133016.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:11:00,544 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9157, 2.8999, 2.2768, 3.3190, 2.9078, 2.9069, 1.2552, 2.8963], + device='cuda:0'), covar=tensor([0.2131, 0.1688, 0.3384, 0.2978, 0.3568, 0.2366, 0.5882, 0.2763], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0218, 0.0251, 0.0303, 0.0293, 0.0245, 0.0272, 0.0269], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 20:11:17,187 INFO [finetune.py:976] (0/7) Epoch 24, batch 1300, loss[loss=0.1156, simple_loss=0.1895, pruned_loss=0.02079, over 4745.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2384, pruned_loss=0.04756, over 951471.62 frames. ], batch size: 27, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:11:37,887 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.644e+02 1.911e+02 2.318e+02 6.826e+02, threshold=3.821e+02, percent-clipped=2.0 +2023-04-27 20:11:50,600 INFO [finetune.py:976] (0/7) Epoch 24, batch 1350, loss[loss=0.1494, simple_loss=0.2262, pruned_loss=0.03627, over 4807.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.239, pruned_loss=0.04795, over 951573.81 frames. ], batch size: 51, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:12:07,604 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133102.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:12:33,635 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.67 vs. limit=5.0 +2023-04-27 20:12:52,452 INFO [finetune.py:976] (0/7) Epoch 24, batch 1400, loss[loss=0.1788, simple_loss=0.2559, pruned_loss=0.05088, over 4819.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2426, pruned_loss=0.04863, over 951428.42 frames. 
], batch size: 51, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:12:55,586 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0147, 1.2268, 5.2324, 4.9259, 4.5142, 5.0300, 4.5280, 4.6457], + device='cuda:0'), covar=tensor([0.6767, 0.6592, 0.0896, 0.1572, 0.1116, 0.1495, 0.1420, 0.1489], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0306, 0.0404, 0.0407, 0.0347, 0.0409, 0.0316, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 20:13:27,512 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9225, 2.2754, 0.9736, 1.2526, 1.6145, 1.1909, 2.4978, 1.4220], + device='cuda:0'), covar=tensor([0.0677, 0.0535, 0.0638, 0.1382, 0.0474, 0.1022, 0.0364, 0.0666], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0063, 0.0047, 0.0046, 0.0049, 0.0050, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 20:13:28,746 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9228, 2.2125, 0.9266, 1.2934, 1.5034, 1.2775, 2.4874, 1.4107], + device='cuda:0'), covar=tensor([0.0652, 0.0541, 0.0652, 0.1261, 0.0501, 0.0922, 0.0301, 0.0685], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0050, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0007, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 20:13:29,246 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.717e+02 2.018e+02 2.257e+02 3.714e+02, threshold=4.037e+02, percent-clipped=0.0 +2023-04-27 20:13:41,457 INFO [finetune.py:976] (0/7) Epoch 24, batch 1450, loss[loss=0.142, simple_loss=0.2218, pruned_loss=0.03112, over 4755.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2443, pruned_loss=0.04921, over 952449.47 frames. ], batch size: 28, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:14:31,505 INFO [finetune.py:976] (0/7) Epoch 24, batch 1500, loss[loss=0.2126, simple_loss=0.2828, pruned_loss=0.07122, over 4808.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2459, pruned_loss=0.0494, over 954769.74 frames. ], batch size: 39, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:15:14,559 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.609e+02 1.860e+02 2.213e+02 4.460e+02, threshold=3.721e+02, percent-clipped=1.0 +2023-04-27 20:15:44,450 INFO [finetune.py:976] (0/7) Epoch 24, batch 1550, loss[loss=0.1675, simple_loss=0.251, pruned_loss=0.04204, over 4907.00 frames. ], tot_loss[loss=0.1733, simple_loss=0.2467, pruned_loss=0.04995, over 952926.90 frames. ], batch size: 37, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:16:10,958 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133316.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:16:26,576 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0708, 1.3782, 5.3547, 4.9792, 4.5820, 5.0974, 4.6588, 4.7094], + device='cuda:0'), covar=tensor([0.6842, 0.6151, 0.0985, 0.1790, 0.1145, 0.1293, 0.1250, 0.1759], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0307, 0.0405, 0.0407, 0.0349, 0.0409, 0.0317, 0.0368], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 20:16:34,615 INFO [finetune.py:976] (0/7) Epoch 24, batch 1600, loss[loss=0.147, simple_loss=0.2137, pruned_loss=0.04012, over 4811.00 frames. 
], tot_loss[loss=0.1701, simple_loss=0.2433, pruned_loss=0.04845, over 953503.44 frames. ], batch size: 51, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:16:44,928 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8955, 1.9463, 1.0698, 1.6429, 2.2632, 1.7698, 1.6678, 1.7006], + device='cuda:0'), covar=tensor([0.0449, 0.0329, 0.0279, 0.0491, 0.0223, 0.0483, 0.0442, 0.0497], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052], + device='cuda:0') +2023-04-27 20:17:10,329 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.33 vs. limit=5.0 +2023-04-27 20:17:16,921 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=133364.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:17:19,224 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.451e+02 1.794e+02 2.296e+02 6.131e+02, threshold=3.588e+02, percent-clipped=2.0 +2023-04-27 20:17:29,277 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133374.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 20:17:30,545 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133376.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:17:31,243 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-27 20:17:42,834 INFO [finetune.py:976] (0/7) Epoch 24, batch 1650, loss[loss=0.1749, simple_loss=0.2569, pruned_loss=0.04651, over 4918.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2398, pruned_loss=0.04731, over 953383.13 frames. ], batch size: 46, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:18:02,171 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=133402.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:18:04,138 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-27 20:18:35,950 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4487, 3.3054, 0.8836, 1.8731, 2.0649, 2.3082, 2.0089, 0.9612], + device='cuda:0'), covar=tensor([0.1390, 0.0869, 0.2000, 0.1204, 0.0978, 0.1038, 0.1451, 0.1918], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0239, 0.0137, 0.0121, 0.0134, 0.0152, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 20:18:48,114 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133435.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 20:18:49,167 INFO [finetune.py:976] (0/7) Epoch 24, batch 1700, loss[loss=0.1733, simple_loss=0.2501, pruned_loss=0.04821, over 4822.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2382, pruned_loss=0.04679, over 953855.05 frames. 
], batch size: 30, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:18:49,280 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133437.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:19:08,004 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=133450.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:19:31,793 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.276e+01 1.467e+02 1.697e+02 2.216e+02 4.193e+02, threshold=3.394e+02, percent-clipped=4.0 +2023-04-27 20:19:41,280 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133474.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:19:53,317 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6842, 1.7004, 1.5988, 1.2136, 1.2801, 1.2800, 1.6299, 1.2225], + device='cuda:0'), covar=tensor([0.1761, 0.1512, 0.1467, 0.1765, 0.2375, 0.1945, 0.1019, 0.2049], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0212, 0.0169, 0.0205, 0.0200, 0.0186, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 20:19:55,042 INFO [finetune.py:976] (0/7) Epoch 24, batch 1750, loss[loss=0.1829, simple_loss=0.2517, pruned_loss=0.0571, over 4217.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2401, pruned_loss=0.04794, over 953138.23 frames. ], batch size: 65, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:20:05,378 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-27 20:21:01,611 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133535.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:21:08,175 INFO [finetune.py:976] (0/7) Epoch 24, batch 1800, loss[loss=0.1757, simple_loss=0.2449, pruned_loss=0.05326, over 4922.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.243, pruned_loss=0.04858, over 954225.02 frames. ], batch size: 38, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:21:22,978 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6208, 0.9764, 1.6296, 2.0830, 1.6945, 1.5758, 1.6465, 1.6125], + device='cuda:0'), covar=tensor([0.4169, 0.6659, 0.5631, 0.5304, 0.5395, 0.7344, 0.7171, 0.8246], + device='cuda:0'), in_proj_covar=tensor([0.0435, 0.0418, 0.0511, 0.0506, 0.0465, 0.0495, 0.0502, 0.0511], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 20:21:44,193 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.423e+01 1.601e+02 1.910e+02 2.304e+02 3.946e+02, threshold=3.820e+02, percent-clipped=1.0 +2023-04-27 20:21:54,275 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2204, 2.2158, 1.7113, 1.8548, 2.0603, 1.7306, 2.6145, 1.3551], + device='cuda:0'), covar=tensor([0.3428, 0.1737, 0.4225, 0.2857, 0.1836, 0.2681, 0.1219, 0.4953], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0351, 0.0424, 0.0349, 0.0376, 0.0373, 0.0367, 0.0418], + device='cuda:0'), out_proj_covar=tensor([9.9740e-05, 1.0486e-04, 1.2844e-04, 1.0485e-04, 1.1165e-04, 1.1128e-04, + 1.0777e-04, 1.2584e-04], device='cuda:0') +2023-04-27 20:22:08,895 INFO [finetune.py:976] (0/7) Epoch 24, batch 1850, loss[loss=0.1506, simple_loss=0.2244, pruned_loss=0.03845, over 4797.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.2457, pruned_loss=0.05006, over 953699.19 frames. 
], batch size: 51, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:22:23,580 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3791, 2.9947, 1.0408, 1.7984, 2.3756, 1.4402, 4.1593, 1.8505], + device='cuda:0'), covar=tensor([0.0609, 0.0667, 0.0784, 0.1185, 0.0489, 0.0975, 0.0187, 0.0605], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 20:22:25,881 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3986, 1.3930, 1.4771, 1.6532, 1.6889, 1.3258, 0.9959, 1.4937], + device='cuda:0'), covar=tensor([0.0779, 0.1255, 0.0965, 0.0546, 0.0607, 0.0788, 0.0814, 0.0596], + device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0205, 0.0187, 0.0173, 0.0179, 0.0181, 0.0151, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 20:22:42,122 INFO [finetune.py:976] (0/7) Epoch 24, batch 1900, loss[loss=0.1694, simple_loss=0.2407, pruned_loss=0.04902, over 4830.00 frames. ], tot_loss[loss=0.1739, simple_loss=0.247, pruned_loss=0.05036, over 955519.33 frames. ], batch size: 47, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:22:50,165 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9980, 1.0393, 1.6905, 1.8080, 1.7222, 1.8436, 1.7103, 1.6967], + device='cuda:0'), covar=tensor([0.4003, 0.5041, 0.4466, 0.4071, 0.5411, 0.6949, 0.4358, 0.4088], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0373, 0.0326, 0.0337, 0.0346, 0.0392, 0.0355, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 20:22:51,303 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133652.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:23:12,129 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.575e+02 1.806e+02 2.357e+02 5.107e+02, threshold=3.612e+02, percent-clipped=3.0 +2023-04-27 20:23:26,868 INFO [finetune.py:976] (0/7) Epoch 24, batch 1950, loss[loss=0.1192, simple_loss=0.1902, pruned_loss=0.02413, over 4714.00 frames. ], tot_loss[loss=0.172, simple_loss=0.2453, pruned_loss=0.04932, over 956385.32 frames. ], batch size: 23, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:23:37,433 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0 +2023-04-27 20:23:43,300 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=133713.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:23:56,171 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133730.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 20:23:57,388 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133732.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:24:00,817 INFO [finetune.py:976] (0/7) Epoch 24, batch 2000, loss[loss=0.1381, simple_loss=0.2282, pruned_loss=0.02395, over 4930.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2409, pruned_loss=0.04742, over 955866.22 frames. 
], batch size: 38, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:24:25,086 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.642e+01 1.427e+02 1.749e+02 2.000e+02 3.515e+02, threshold=3.497e+02, percent-clipped=0.0 +2023-04-27 20:24:25,834 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5994, 1.6338, 1.4427, 0.9827, 1.2514, 1.1835, 1.4534, 1.1902], + device='cuda:0'), covar=tensor([0.1596, 0.1247, 0.1513, 0.1864, 0.2179, 0.1914, 0.1002, 0.1958], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0210, 0.0169, 0.0205, 0.0200, 0.0185, 0.0156, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 20:24:53,609 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8452, 2.2601, 1.9070, 2.1606, 1.6661, 1.8995, 1.8739, 1.5226], + device='cuda:0'), covar=tensor([0.1687, 0.1033, 0.0767, 0.1030, 0.3053, 0.0903, 0.1606, 0.2299], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0302, 0.0216, 0.0277, 0.0315, 0.0256, 0.0250, 0.0265], + device='cuda:0'), out_proj_covar=tensor([1.1454e-04, 1.1957e-04, 8.5092e-05, 1.0926e-04, 1.2691e-04, 1.0115e-04, + 1.0083e-04, 1.0456e-04], device='cuda:0') +2023-04-27 20:24:55,311 INFO [finetune.py:976] (0/7) Epoch 24, batch 2050, loss[loss=0.196, simple_loss=0.2603, pruned_loss=0.06584, over 4814.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2385, pruned_loss=0.0472, over 955460.41 frames. ], batch size: 41, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:25:06,934 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2148, 1.3768, 1.7517, 1.8712, 1.8570, 1.9335, 1.7503, 1.7856], + device='cuda:0'), covar=tensor([0.3911, 0.5406, 0.4333, 0.4378, 0.5281, 0.6928, 0.5400, 0.4745], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0376, 0.0328, 0.0339, 0.0348, 0.0395, 0.0358, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 20:25:24,996 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-27 20:25:48,582 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=133830.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 20:25:57,780 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3257, 3.1488, 0.9340, 1.7270, 1.6997, 2.3466, 1.8425, 0.9603], + device='cuda:0'), covar=tensor([0.1393, 0.1009, 0.1874, 0.1223, 0.1117, 0.0934, 0.1406, 0.2081], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0240, 0.0137, 0.0121, 0.0133, 0.0152, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 20:25:58,914 INFO [finetune.py:976] (0/7) Epoch 24, batch 2100, loss[loss=0.1683, simple_loss=0.2328, pruned_loss=0.05185, over 4886.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.238, pruned_loss=0.04743, over 956226.52 frames. 
], batch size: 32, lr: 3.06e-03, grad_scale: 16.0 +2023-04-27 20:26:01,365 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6156, 3.5779, 2.6191, 4.1973, 3.6511, 3.6385, 1.6329, 3.5815], + device='cuda:0'), covar=tensor([0.1666, 0.1521, 0.3330, 0.1869, 0.3521, 0.1729, 0.5570, 0.2418], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0220, 0.0253, 0.0306, 0.0296, 0.0247, 0.0275, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 20:26:22,912 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.658e+02 2.018e+02 2.431e+02 5.100e+02, threshold=4.036e+02, percent-clipped=6.0 +2023-04-27 20:26:37,509 INFO [finetune.py:976] (0/7) Epoch 24, batch 2150, loss[loss=0.1285, simple_loss=0.1895, pruned_loss=0.03374, over 3807.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2414, pruned_loss=0.04875, over 955518.98 frames. ], batch size: 16, lr: 3.05e-03, grad_scale: 16.0 +2023-04-27 20:26:50,988 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-27 20:26:52,553 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4710, 1.4552, 0.6040, 1.2318, 1.3359, 1.3515, 1.2718, 1.3208], + device='cuda:0'), covar=tensor([0.0509, 0.0369, 0.0395, 0.0564, 0.0314, 0.0505, 0.0514, 0.0562], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0038, 0.0050, 0.0049, 0.0052], + device='cuda:0') +2023-04-27 20:27:08,320 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-27 20:27:09,764 INFO [finetune.py:976] (0/7) Epoch 24, batch 2200, loss[loss=0.2348, simple_loss=0.2937, pruned_loss=0.08791, over 4907.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2437, pruned_loss=0.0496, over 954531.85 frames. ], batch size: 43, lr: 3.05e-03, grad_scale: 16.0 +2023-04-27 20:27:48,472 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.369e+01 1.555e+02 1.795e+02 2.184e+02 7.045e+02, threshold=3.590e+02, percent-clipped=2.0 +2023-04-27 20:28:06,399 INFO [finetune.py:976] (0/7) Epoch 24, batch 2250, loss[loss=0.1731, simple_loss=0.2487, pruned_loss=0.04881, over 4892.00 frames. ], tot_loss[loss=0.1736, simple_loss=0.2455, pruned_loss=0.0508, over 952745.57 frames. 
], batch size: 43, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:28:07,645 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4548, 0.7352, 1.4206, 1.8246, 1.5662, 1.4023, 1.4216, 1.4269],
+ device='cuda:0'), covar=tensor([0.3633, 0.5794, 0.4447, 0.5186, 0.4680, 0.6141, 0.6032, 0.6446],
+ device='cuda:0'), in_proj_covar=tensor([0.0434, 0.0417, 0.0511, 0.0506, 0.0465, 0.0495, 0.0501, 0.0511],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 20:28:14,512 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=133997.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:28:16,873 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-134000.pt
+2023-04-27 20:28:18,764 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9077, 2.3806, 1.8806, 1.6525, 1.3595, 1.3806, 2.0192, 1.3491],
+ device='cuda:0'), covar=tensor([0.1688, 0.1407, 0.1410, 0.1772, 0.2240, 0.1921, 0.0901, 0.1926],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0210, 0.0169, 0.0205, 0.0199, 0.0185, 0.0156, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 20:28:23,004 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134008.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:28:36,980 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134030.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 20:28:38,184 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134032.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:28:41,584 INFO [finetune.py:976] (0/7) Epoch 24, batch 2300, loss[loss=0.1299, simple_loss=0.208, pruned_loss=0.02589, over 4691.00 frames. ], tot_loss[loss=0.1725, simple_loss=0.2454, pruned_loss=0.04983, over 954570.85 frames. ], batch size: 23, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:28:55,894 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134058.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:29:00,807 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2717, 1.4664, 1.9863, 2.5631, 2.1225, 1.5389, 1.5142, 1.8800],
+ device='cuda:0'), covar=tensor([0.3163, 0.3569, 0.1890, 0.2419, 0.2628, 0.2771, 0.4229, 0.2112],
+ device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0245, 0.0229, 0.0315, 0.0221, 0.0235, 0.0229, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 20:29:01,262 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.437e+01 1.459e+02 1.887e+02 2.167e+02 3.394e+02, threshold=3.773e+02, percent-clipped=1.0
+2023-04-27 20:29:08,087 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=134078.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 20:29:09,324 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=134080.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:29:09,463 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.55 vs. limit=2.0
+2023-04-27 20:29:14,456 INFO [finetune.py:976] (0/7) Epoch 24, batch 2350, loss[loss=0.1697, simple_loss=0.2385, pruned_loss=0.05046, over 4228.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2432, pruned_loss=0.04959, over 954053.52 frames. ], batch size: 66, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:29:25,727 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7955, 2.0386, 1.0159, 1.5375, 2.3344, 1.6510, 1.5760, 1.6765],
+ device='cuda:0'), covar=tensor([0.0454, 0.0338, 0.0284, 0.0525, 0.0215, 0.0458, 0.0448, 0.0541],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 20:29:30,570 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6127, 1.4178, 1.4041, 1.0734, 1.4172, 1.1961, 1.7426, 1.3022],
+ device='cuda:0'), covar=tensor([0.2977, 0.1619, 0.4576, 0.2230, 0.1366, 0.2094, 0.1390, 0.4736],
+ device='cuda:0'), in_proj_covar=tensor([0.0333, 0.0347, 0.0420, 0.0345, 0.0374, 0.0370, 0.0363, 0.0417],
+ device='cuda:0'), out_proj_covar=tensor([9.8575e-05, 1.0367e-04, 1.2714e-04, 1.0366e-04, 1.1098e-04, 1.1022e-04,
+ 1.0657e-04, 1.2533e-04], device='cuda:0')
+2023-04-27 20:29:31,809 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9305, 0.6475, 0.7471, 0.7561, 1.0970, 0.8523, 0.7180, 0.7622],
+ device='cuda:0'), covar=tensor([0.1847, 0.1552, 0.2170, 0.1626, 0.1139, 0.1542, 0.1579, 0.2486],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0305, 0.0347, 0.0281, 0.0324, 0.0304, 0.0296, 0.0369],
+ device='cuda:0'), out_proj_covar=tensor([6.3245e-05, 6.2876e-05, 7.2983e-05, 5.6454e-05, 6.6815e-05, 6.3609e-05,
+ 6.1564e-05, 7.8197e-05], device='cuda:0')
+2023-04-27 20:29:36,600 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4910, 3.0878, 1.0893, 1.8467, 2.5331, 1.4704, 4.0986, 1.8776],
+ device='cuda:0'), covar=tensor([0.0588, 0.0724, 0.0830, 0.1171, 0.0460, 0.0923, 0.0195, 0.0591],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 20:29:42,049 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134130.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:29:46,678 INFO [finetune.py:976] (0/7) Epoch 24, batch 2400, loss[loss=0.1341, simple_loss=0.2079, pruned_loss=0.03016, over 4943.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2407, pruned_loss=0.04855, over 955829.18 frames. ], batch size: 38, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:30:06,805 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.831e+01 1.496e+02 1.764e+02 2.072e+02 3.760e+02, threshold=3.528e+02, percent-clipped=0.0
+2023-04-27 20:30:13,578 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=134178.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:30:19,543 INFO [finetune.py:976] (0/7) Epoch 24, batch 2450, loss[loss=0.1735, simple_loss=0.2369, pruned_loss=0.05504, over 4902.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2373, pruned_loss=0.04738, over 956081.91 frames. ], batch size: 32, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:31:07,208 INFO [finetune.py:976] (0/7) Epoch 24, batch 2500, loss[loss=0.1845, simple_loss=0.2677, pruned_loss=0.05059, over 4943.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2383, pruned_loss=0.04775, over 957121.76 frames. ], batch size: 33, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:31:39,318 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.116e+02 1.581e+02 1.827e+02 2.358e+02 4.176e+02, threshold=3.655e+02, percent-clipped=4.0
+2023-04-27 20:31:43,073 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2729, 2.2231, 1.7757, 1.8993, 2.2730, 1.8399, 2.8338, 1.4829],
+ device='cuda:0'), covar=tensor([0.3818, 0.1991, 0.5099, 0.3186, 0.1989, 0.2889, 0.1470, 0.5074],
+ device='cuda:0'), in_proj_covar=tensor([0.0331, 0.0345, 0.0418, 0.0344, 0.0372, 0.0368, 0.0361, 0.0415],
+ device='cuda:0'), out_proj_covar=tensor([9.8064e-05, 1.0304e-04, 1.2661e-04, 1.0325e-04, 1.1041e-04, 1.0956e-04,
+ 1.0606e-04, 1.2478e-04], device='cuda:0')
+2023-04-27 20:32:02,531 INFO [finetune.py:976] (0/7) Epoch 24, batch 2550, loss[loss=0.1314, simple_loss=0.2086, pruned_loss=0.0271, over 4785.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2419, pruned_loss=0.04858, over 955146.83 frames. ], batch size: 26, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:32:26,939 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134304.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:32:34,599 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134308.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:32:35,221 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134309.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:33:08,807 INFO [finetune.py:976] (0/7) Epoch 24, batch 2600, loss[loss=0.1983, simple_loss=0.2764, pruned_loss=0.06006, over 4732.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2439, pruned_loss=0.04916, over 956478.50 frames. ], batch size: 59, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:33:27,025 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134353.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:33:29,494 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=134356.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:33:42,133 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134365.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:33:43,227 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.588e+02 1.844e+02 2.194e+02 4.309e+02, threshold=3.689e+02, percent-clipped=2.0
+2023-04-27 20:33:45,765 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134370.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:33:53,992 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4152, 3.2075, 0.9516, 1.8145, 1.8372, 2.3862, 1.8260, 0.9189],
+ device='cuda:0'), covar=tensor([0.1412, 0.0970, 0.1885, 0.1222, 0.1087, 0.0952, 0.1538, 0.2012],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0239, 0.0136, 0.0120, 0.0133, 0.0152, 0.0116, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 20:34:07,320 INFO [finetune.py:976] (0/7) Epoch 24, batch 2650, loss[loss=0.1848, simple_loss=0.2426, pruned_loss=0.06346, over 4771.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2451, pruned_loss=0.04936, over 957348.95 frames. ], batch size: 27, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:34:37,835 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.95 vs. limit=5.0
+2023-04-27 20:35:02,816 INFO [finetune.py:976] (0/7) Epoch 24, batch 2700, loss[loss=0.1465, simple_loss=0.2162, pruned_loss=0.03845, over 4884.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2434, pruned_loss=0.04808, over 956602.28 frames. ], batch size: 32, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:35:23,806 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.513e+01 1.459e+02 1.776e+02 2.011e+02 3.451e+02, threshold=3.553e+02, percent-clipped=0.0
+2023-04-27 20:35:36,506 INFO [finetune.py:976] (0/7) Epoch 24, batch 2750, loss[loss=0.1792, simple_loss=0.2522, pruned_loss=0.05312, over 4907.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2407, pruned_loss=0.04741, over 955728.74 frames. ], batch size: 37, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:36:43,195 INFO [finetune.py:976] (0/7) Epoch 24, batch 2800, loss[loss=0.158, simple_loss=0.2329, pruned_loss=0.04157, over 4749.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2375, pruned_loss=0.04655, over 955597.29 frames. ], batch size: 27, lr: 3.05e-03, grad_scale: 16.0
+2023-04-27 20:37:23,287 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.232e+01 1.527e+02 1.797e+02 2.170e+02 3.863e+02, threshold=3.594e+02, percent-clipped=3.0
+2023-04-27 20:37:46,657 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0
+2023-04-27 20:37:48,299 INFO [finetune.py:976] (0/7) Epoch 24, batch 2850, loss[loss=0.1672, simple_loss=0.2431, pruned_loss=0.04563, over 4920.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2371, pruned_loss=0.0465, over 957377.79 frames. ], batch size: 33, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:37:57,235 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.90 vs. limit=5.0
+2023-04-27 20:38:08,482 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1932, 1.5192, 1.4239, 1.6490, 1.6084, 2.0405, 1.3314, 3.5787],
+ device='cuda:0'), covar=tensor([0.0579, 0.0787, 0.0731, 0.1209, 0.0607, 0.0509, 0.0736, 0.0153],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 20:38:59,968 INFO [finetune.py:976] (0/7) Epoch 24, batch 2900, loss[loss=0.1801, simple_loss=0.2571, pruned_loss=0.05152, over 4906.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.24, pruned_loss=0.04779, over 956604.35 frames. ], batch size: 43, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:39:15,052 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134653.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:39:24,528 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134660.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:39:33,041 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=134665.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:39:34,174 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.095e+02 1.576e+02 1.923e+02 2.277e+02 4.210e+02, threshold=3.845e+02, percent-clipped=2.0
+2023-04-27 20:40:04,275 INFO [finetune.py:976] (0/7) Epoch 24, batch 2950, loss[loss=0.1964, simple_loss=0.2708, pruned_loss=0.06098, over 4752.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2423, pruned_loss=0.04836, over 955407.72 frames. ], batch size: 59, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:40:18,488 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=134701.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:40:27,714 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0
+2023-04-27 20:40:36,816 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. limit=5.0
+2023-04-27 20:41:00,406 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-27 20:41:09,548 INFO [finetune.py:976] (0/7) Epoch 24, batch 3000, loss[loss=0.1596, simple_loss=0.2347, pruned_loss=0.04224, over 4765.00 frames. ], tot_loss[loss=0.1716, simple_loss=0.245, pruned_loss=0.04914, over 955316.22 frames. ], batch size: 26, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:41:09,549 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 20:41:25,509 INFO [finetune.py:1010] (0/7) Epoch 24, validation: loss=0.1526, simple_loss=0.2221, pruned_loss=0.04154, over 2265189.00 frames.
+2023-04-27 20:41:25,509 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 20:41:26,883 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4124, 2.0892, 2.4064, 2.7356, 2.7307, 2.0746, 1.9005, 2.2875],
+ device='cuda:0'), covar=tensor([0.0762, 0.1059, 0.0644, 0.0610, 0.0607, 0.0965, 0.0774, 0.0648],
+ device='cuda:0'), in_proj_covar=tensor([0.0188, 0.0206, 0.0187, 0.0174, 0.0180, 0.0181, 0.0152, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 20:41:38,955 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0
+2023-04-27 20:41:44,218 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.083e+02 1.565e+02 1.917e+02 2.257e+02 3.857e+02, threshold=3.833e+02, percent-clipped=1.0
+2023-04-27 20:41:57,443 INFO [finetune.py:976] (0/7) Epoch 24, batch 3050, loss[loss=0.1356, simple_loss=0.2051, pruned_loss=0.03302, over 3897.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2456, pruned_loss=0.04894, over 954731.49 frames. ], batch size: 17, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:42:01,203 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0
+2023-04-27 20:42:20,027 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3398, 1.8475, 2.2461, 2.5586, 2.2253, 1.7787, 1.3798, 2.0189],
+ device='cuda:0'), covar=tensor([0.2911, 0.2607, 0.1470, 0.2018, 0.2300, 0.2397, 0.4075, 0.1854],
+ device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0245, 0.0228, 0.0315, 0.0221, 0.0235, 0.0229, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 20:42:30,089 INFO [finetune.py:976] (0/7) Epoch 24, batch 3100, loss[loss=0.1671, simple_loss=0.2337, pruned_loss=0.05029, over 4891.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2432, pruned_loss=0.04841, over 954159.21 frames. ], batch size: 32, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:42:47,424 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=134862.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:42:50,371 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.815e+01 1.536e+02 1.798e+02 2.119e+02 3.796e+02, threshold=3.595e+02, percent-clipped=0.0
+2023-04-27 20:43:02,614 INFO [finetune.py:976] (0/7) Epoch 24, batch 3150, loss[loss=0.1676, simple_loss=0.2307, pruned_loss=0.05227, over 4839.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2392, pruned_loss=0.04696, over 954650.57 frames. ], batch size: 30, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:43:28,084 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=134923.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:43:36,615 INFO [finetune.py:976] (0/7) Epoch 24, batch 3200, loss[loss=0.1721, simple_loss=0.2373, pruned_loss=0.05346, over 4928.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2364, pruned_loss=0.04649, over 953744.23 frames. ], batch size: 38, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:43:53,551 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134960.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:43:56,595 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=134965.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:43:57,661 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.048e+02 1.515e+02 1.832e+02 2.291e+02 6.391e+02, threshold=3.663e+02, percent-clipped=4.0
+2023-04-27 20:44:10,014 INFO [finetune.py:976] (0/7) Epoch 24, batch 3250, loss[loss=0.181, simple_loss=0.2432, pruned_loss=0.05937, over 4147.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2368, pruned_loss=0.04687, over 955161.28 frames. ], batch size: 65, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:44:25,317 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=135008.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:44:28,801 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=135013.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:44:31,245 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9578, 2.4463, 1.1163, 1.3564, 2.0597, 1.1916, 3.1757, 1.6994],
+ device='cuda:0'), covar=tensor([0.0712, 0.0636, 0.0768, 0.1287, 0.0482, 0.1064, 0.0262, 0.0630],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0063, 0.0047, 0.0046, 0.0049, 0.0050, 0.0072, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 20:44:43,394 INFO [finetune.py:976] (0/7) Epoch 24, batch 3300, loss[loss=0.1655, simple_loss=0.2487, pruned_loss=0.04115, over 4867.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2419, pruned_loss=0.04838, over 954430.76 frames. ], batch size: 31, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:45:15,629 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.097e+02 1.494e+02 1.834e+02 2.222e+02 6.565e+02, threshold=3.668e+02, percent-clipped=2.0
+2023-04-27 20:45:24,425 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1890, 1.5785, 1.9997, 2.5043, 1.9940, 1.5677, 1.3979, 1.8173],
+ device='cuda:0'), covar=tensor([0.3170, 0.3166, 0.1705, 0.2023, 0.2632, 0.2749, 0.4029, 0.2020],
+ device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0245, 0.0228, 0.0314, 0.0221, 0.0234, 0.0228, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 20:45:44,561 INFO [finetune.py:976] (0/7) Epoch 24, batch 3350, loss[loss=0.1763, simple_loss=0.2516, pruned_loss=0.05054, over 4755.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2424, pruned_loss=0.04823, over 952399.58 frames. ], batch size: 27, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:46:05,326 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0787, 2.6165, 1.0253, 1.4740, 2.2508, 1.1624, 3.6547, 1.7108],
+ device='cuda:0'), covar=tensor([0.0696, 0.0752, 0.0843, 0.1284, 0.0465, 0.1074, 0.0202, 0.0602],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 20:46:49,141 INFO [finetune.py:976] (0/7) Epoch 24, batch 3400, loss[loss=0.2155, simple_loss=0.2839, pruned_loss=0.0736, over 4813.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.243, pruned_loss=0.04872, over 952136.53 frames. ], batch size: 33, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:46:51,235 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-04-27 20:47:25,516 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.583e+02 1.888e+02 2.345e+02 4.447e+02, threshold=3.777e+02, percent-clipped=3.0
+2023-04-27 20:47:33,944 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135171.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:47:54,153 INFO [finetune.py:976] (0/7) Epoch 24, batch 3450, loss[loss=0.1829, simple_loss=0.2522, pruned_loss=0.05675, over 4915.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2429, pruned_loss=0.04861, over 951768.50 frames. ], batch size: 43, lr: 3.05e-03, grad_scale: 32.0
+2023-04-27 20:47:56,161 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0379, 1.8807, 2.2415, 2.4814, 2.0687, 1.9904, 2.1297, 2.0924],
+ device='cuda:0'), covar=tensor([0.4951, 0.7234, 0.8018, 0.6048, 0.6526, 0.9528, 0.9559, 1.0458],
+ device='cuda:0'), in_proj_covar=tensor([0.0437, 0.0420, 0.0512, 0.0507, 0.0466, 0.0499, 0.0504, 0.0514],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 20:48:14,487 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6423, 2.0557, 1.7605, 1.9611, 1.5231, 1.7824, 1.7236, 1.3300],
+ device='cuda:0'), covar=tensor([0.1858, 0.1325, 0.0869, 0.1158, 0.3474, 0.1041, 0.1811, 0.2770],
+ device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0298, 0.0214, 0.0275, 0.0312, 0.0253, 0.0248, 0.0263],
+ device='cuda:0'), out_proj_covar=tensor([1.1310e-04, 1.1783e-04, 8.4234e-05, 1.0865e-04, 1.2580e-04, 9.9857e-05,
+ 1.0032e-04, 1.0395e-04], device='cuda:0')
+2023-04-27 20:48:24,218 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3557, 1.0332, 0.3569, 1.1720, 1.0191, 1.2730, 1.2038, 1.2302],
+ device='cuda:0'), covar=tensor([0.0392, 0.0328, 0.0339, 0.0463, 0.0258, 0.0414, 0.0378, 0.0452],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0053, 0.0039, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 20:48:27,439 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135218.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:48:36,537 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135232.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:48:39,438 INFO [finetune.py:976] (0/7) Epoch 24, batch 3500, loss[loss=0.1475, simple_loss=0.2141, pruned_loss=0.04043, over 4866.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2393, pruned_loss=0.0473, over 953150.01 frames. ], batch size: 31, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:48:59,253 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.073e+02 1.566e+02 1.818e+02 2.142e+02 4.699e+02, threshold=3.637e+02, percent-clipped=2.0
+2023-04-27 20:49:13,318 INFO [finetune.py:976] (0/7) Epoch 24, batch 3550, loss[loss=0.1666, simple_loss=0.241, pruned_loss=0.04612, over 4805.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2381, pruned_loss=0.04745, over 954183.70 frames. ], batch size: 25, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:49:15,890 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7775, 1.3652, 1.8695, 2.3078, 1.8877, 1.7431, 1.8203, 1.7647],
+ device='cuda:0'), covar=tensor([0.4648, 0.6760, 0.6671, 0.5807, 0.6059, 0.7905, 0.8397, 0.9246],
+ device='cuda:0'), in_proj_covar=tensor([0.0435, 0.0419, 0.0511, 0.0506, 0.0465, 0.0498, 0.0503, 0.0513],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 20:49:37,339 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7463, 2.2954, 2.5531, 3.2711, 2.6061, 2.1342, 2.1102, 2.5198],
+ device='cuda:0'), covar=tensor([0.2991, 0.2891, 0.1575, 0.2286, 0.2644, 0.2444, 0.3640, 0.1911],
+ device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0245, 0.0228, 0.0315, 0.0222, 0.0235, 0.0229, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 20:49:47,282 INFO [finetune.py:976] (0/7) Epoch 24, batch 3600, loss[loss=0.1676, simple_loss=0.2291, pruned_loss=0.05302, over 4868.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2359, pruned_loss=0.04659, over 953787.40 frames. ], batch size: 31, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:49:57,696 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.0843, 3.4574, 1.3537, 2.3257, 2.8372, 2.3088, 4.6735, 2.6315],
+ device='cuda:0'), covar=tensor([0.0487, 0.0531, 0.0765, 0.1154, 0.0468, 0.0782, 0.0225, 0.0524],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 20:50:04,898 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6145, 1.2962, 1.3279, 1.3300, 1.7581, 1.3967, 1.2069, 1.2676],
+ device='cuda:0'), covar=tensor([0.1527, 0.1401, 0.1954, 0.1358, 0.0973, 0.1584, 0.2115, 0.2387],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0304, 0.0347, 0.0281, 0.0324, 0.0302, 0.0296, 0.0367],
+ device='cuda:0'), out_proj_covar=tensor([6.3342e-05, 6.2632e-05, 7.2945e-05, 5.6274e-05, 6.6584e-05, 6.3320e-05,
+ 6.1527e-05, 7.7868e-05], device='cuda:0')
+2023-04-27 20:50:05,966 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.034e+02 1.602e+02 1.884e+02 2.235e+02 3.277e+02, threshold=3.769e+02, percent-clipped=0.0
+2023-04-27 20:50:20,184 INFO [finetune.py:976] (0/7) Epoch 24, batch 3650, loss[loss=0.1865, simple_loss=0.2637, pruned_loss=0.05469, over 4820.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2393, pruned_loss=0.04811, over 953718.16 frames. ], batch size: 39, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:50:21,580 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.85 vs. limit=5.0
+2023-04-27 20:50:25,050 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.2415, 4.2368, 3.1366, 4.9347, 4.3393, 4.1950, 1.6886, 4.2787],
+ device='cuda:0'), covar=tensor([0.1767, 0.1008, 0.3603, 0.1042, 0.2928, 0.1788, 0.6384, 0.2454],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0220, 0.0253, 0.0308, 0.0298, 0.0250, 0.0276, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 20:50:52,195 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.5426, 3.5348, 2.8324, 4.1389, 3.5130, 3.5156, 1.7018, 3.4820],
+ device='cuda:0'), covar=tensor([0.1684, 0.1385, 0.3460, 0.1536, 0.3200, 0.1736, 0.5374, 0.2722],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0220, 0.0254, 0.0308, 0.0298, 0.0250, 0.0276, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 20:50:53,966 INFO [finetune.py:976] (0/7) Epoch 24, batch 3700, loss[loss=0.1474, simple_loss=0.2309, pruned_loss=0.03196, over 4766.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2409, pruned_loss=0.04797, over 951811.65 frames. ], batch size: 28, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:51:02,399 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3227, 2.2015, 2.1027, 1.8708, 2.2274, 1.9687, 2.8092, 1.8312],
+ device='cuda:0'), covar=tensor([0.2682, 0.1679, 0.3415, 0.2724, 0.1346, 0.2149, 0.1183, 0.3345],
+ device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0353, 0.0425, 0.0351, 0.0379, 0.0377, 0.0369, 0.0422],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 20:51:12,478 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.055e+02 1.617e+02 1.866e+02 2.198e+02 4.179e+02, threshold=3.733e+02, percent-clipped=1.0
+2023-04-27 20:51:27,097 INFO [finetune.py:976] (0/7) Epoch 24, batch 3750, loss[loss=0.1598, simple_loss=0.2293, pruned_loss=0.04516, over 4844.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2425, pruned_loss=0.04839, over 953076.29 frames. ], batch size: 49, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:51:46,873 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3868, 2.9159, 0.8288, 1.6416, 1.7354, 2.1990, 1.7508, 0.9518],
+ device='cuda:0'), covar=tensor([0.1254, 0.0862, 0.1961, 0.1213, 0.1014, 0.0960, 0.1450, 0.1940],
+ device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0236, 0.0135, 0.0120, 0.0131, 0.0151, 0.0116, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 20:52:02,869 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4073, 1.3279, 1.7574, 1.6968, 1.3077, 1.2252, 1.3677, 0.8853],
+ device='cuda:0'), covar=tensor([0.0504, 0.0643, 0.0327, 0.0582, 0.0707, 0.1100, 0.0596, 0.0560],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0066, 0.0068, 0.0075, 0.0095, 0.0073, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 20:52:05,312 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135518.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:52:16,493 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135527.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:52:29,077 INFO [finetune.py:976] (0/7) Epoch 24, batch 3800, loss[loss=0.1599, simple_loss=0.2412, pruned_loss=0.03935, over 4851.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2443, pruned_loss=0.04893, over 952491.48 frames. ], batch size: 44, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:53:08,358 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=135566.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:53:08,923 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.795e+01 1.517e+02 1.799e+02 2.182e+02 4.922e+02, threshold=3.597e+02, percent-clipped=3.0
+2023-04-27 20:53:32,878 INFO [finetune.py:976] (0/7) Epoch 24, batch 3850, loss[loss=0.1437, simple_loss=0.2161, pruned_loss=0.03568, over 4700.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.243, pruned_loss=0.04811, over 952247.90 frames. ], batch size: 23, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:53:50,652 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7320, 1.7088, 0.8750, 1.3855, 1.8469, 1.5696, 1.4502, 1.5487],
+ device='cuda:0'), covar=tensor([0.0463, 0.0358, 0.0328, 0.0539, 0.0265, 0.0481, 0.0489, 0.0548],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 20:54:23,657 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135630.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:54:33,845 INFO [finetune.py:976] (0/7) Epoch 24, batch 3900, loss[loss=0.1767, simple_loss=0.25, pruned_loss=0.05172, over 4820.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2401, pruned_loss=0.04759, over 954802.22 frames. ], batch size: 38, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:55:15,859 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.029e+02 1.536e+02 1.810e+02 2.224e+02 3.785e+02, threshold=3.619e+02, percent-clipped=2.0
+2023-04-27 20:55:23,882 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.19 vs. limit=5.0
+2023-04-27 20:55:44,888 INFO [finetune.py:976] (0/7) Epoch 24, batch 3950, loss[loss=0.1146, simple_loss=0.192, pruned_loss=0.01863, over 4774.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2378, pruned_loss=0.04719, over 955950.13 frames. ], batch size: 28, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:55:47,438 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=135691.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:56:44,335 INFO [finetune.py:976] (0/7) Epoch 24, batch 4000, loss[loss=0.1624, simple_loss=0.2361, pruned_loss=0.04437, over 4752.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2384, pruned_loss=0.04782, over 954826.03 frames. ], batch size: 28, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:57:21,758 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0971, 1.3685, 1.2759, 1.5732, 1.4217, 1.5996, 1.3012, 2.2711],
+ device='cuda:0'), covar=tensor([0.0602, 0.0758, 0.0781, 0.1065, 0.0624, 0.0424, 0.0685, 0.0270],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 20:57:22,844 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.579e+02 1.856e+02 2.321e+02 4.338e+02, threshold=3.712e+02, percent-clipped=1.0
+2023-04-27 20:57:34,240 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5630, 2.1977, 1.8252, 1.9960, 2.2479, 1.9639, 2.5479, 1.6621],
+ device='cuda:0'), covar=tensor([0.2642, 0.1482, 0.3780, 0.2291, 0.1442, 0.1822, 0.1437, 0.3825],
+ device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0354, 0.0428, 0.0353, 0.0380, 0.0378, 0.0370, 0.0424],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 20:57:52,045 INFO [finetune.py:976] (0/7) Epoch 24, batch 4050, loss[loss=0.1811, simple_loss=0.2574, pruned_loss=0.05238, over 4846.00 frames. ], tot_loss[loss=0.1717, simple_loss=0.2433, pruned_loss=0.05006, over 952641.31 frames. ], batch size: 44, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:58:04,438 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0
+2023-04-27 20:58:39,884 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=135827.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:58:52,538 INFO [finetune.py:976] (0/7) Epoch 24, batch 4100, loss[loss=0.15, simple_loss=0.2254, pruned_loss=0.03731, over 4779.00 frames. ], tot_loss[loss=0.1727, simple_loss=0.2452, pruned_loss=0.05014, over 952018.98 frames. ], batch size: 29, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:59:27,793 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3452, 1.6242, 1.8069, 1.9375, 1.7869, 1.8350, 1.8504, 1.8611],
+ device='cuda:0'), covar=tensor([0.3799, 0.5071, 0.4390, 0.4197, 0.5402, 0.7142, 0.4907, 0.4357],
+ device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0374, 0.0326, 0.0340, 0.0348, 0.0395, 0.0357, 0.0331],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 20:59:28,241 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.032e+02 1.687e+02 1.984e+02 2.360e+02 5.004e+02, threshold=3.968e+02, percent-clipped=3.0
+2023-04-27 20:59:33,107 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=135875.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 20:59:40,397 INFO [finetune.py:976] (0/7) Epoch 24, batch 4150, loss[loss=0.1262, simple_loss=0.2035, pruned_loss=0.02445, over 4795.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2454, pruned_loss=0.04923, over 953958.43 frames. ], batch size: 25, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 20:59:44,653 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9523, 1.6742, 1.8914, 2.2157, 2.2659, 1.7954, 1.6321, 2.0421],
+ device='cuda:0'), covar=tensor([0.0743, 0.1171, 0.0718, 0.0549, 0.0589, 0.0802, 0.0740, 0.0544],
+ device='cuda:0'), in_proj_covar=tensor([0.0190, 0.0207, 0.0189, 0.0176, 0.0181, 0.0182, 0.0154, 0.0181],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 21:00:14,126 INFO [finetune.py:976] (0/7) Epoch 24, batch 4200, loss[loss=0.1579, simple_loss=0.2277, pruned_loss=0.04404, over 4852.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2458, pruned_loss=0.04917, over 955512.75 frames. ], batch size: 31, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:00:35,175 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.525e+02 1.774e+02 2.091e+02 5.050e+02, threshold=3.548e+02, percent-clipped=3.0
+2023-04-27 21:00:44,766 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135982.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:00:47,164 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=135986.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:00:47,701 INFO [finetune.py:976] (0/7) Epoch 24, batch 4250, loss[loss=0.1495, simple_loss=0.2202, pruned_loss=0.03941, over 4930.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2433, pruned_loss=0.04861, over 957065.03 frames. ], batch size: 33, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:00:52,084 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=135994.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:00:56,772 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-136000.pt
+2023-04-27 21:01:07,844 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9226, 2.7765, 2.2600, 3.2917, 2.8576, 2.9065, 1.2417, 2.7810],
+ device='cuda:0'), covar=tensor([0.1998, 0.1639, 0.3161, 0.2957, 0.2671, 0.2205, 0.5778, 0.2866],
+ device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0219, 0.0253, 0.0307, 0.0296, 0.0249, 0.0275, 0.0272],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 21:01:22,183 INFO [finetune.py:976] (0/7) Epoch 24, batch 4300, loss[loss=0.165, simple_loss=0.235, pruned_loss=0.04748, over 4866.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2394, pruned_loss=0.04694, over 958528.38 frames. ], batch size: 44, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:01:26,005 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136043.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:01:29,144 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.95 vs. limit=5.0
+2023-04-27 21:01:34,330 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136055.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:01:42,938 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.901e+01 1.479e+02 1.753e+02 2.087e+02 3.589e+02, threshold=3.506e+02, percent-clipped=1.0
+2023-04-27 21:01:55,527 INFO [finetune.py:976] (0/7) Epoch 24, batch 4350, loss[loss=0.1894, simple_loss=0.2556, pruned_loss=0.06155, over 4863.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2377, pruned_loss=0.04653, over 959079.53 frames. ], batch size: 31, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:02:33,974 INFO [finetune.py:976] (0/7) Epoch 24, batch 4400, loss[loss=0.2016, simple_loss=0.2691, pruned_loss=0.06709, over 4899.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2372, pruned_loss=0.04691, over 956994.25 frames. ], batch size: 35, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:02:42,608 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136142.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:03:16,198 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.001e+02 1.622e+02 1.920e+02 2.374e+02 4.375e+02, threshold=3.840e+02, percent-clipped=3.0
+2023-04-27 21:03:40,890 INFO [finetune.py:976] (0/7) Epoch 24, batch 4450, loss[loss=0.2256, simple_loss=0.2957, pruned_loss=0.07779, over 4809.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2412, pruned_loss=0.0486, over 953777.09 frames. ], batch size: 51, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:03:59,051 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7683, 2.5193, 1.8619, 1.8193, 1.2775, 1.3204, 1.9362, 1.2880],
+ device='cuda:0'), covar=tensor([0.1733, 0.1334, 0.1418, 0.1717, 0.2312, 0.1984, 0.0966, 0.2048],
+ device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0212, 0.0170, 0.0206, 0.0201, 0.0186, 0.0157, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 21:04:02,267 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136203.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:04:53,819 INFO [finetune.py:976] (0/7) Epoch 24, batch 4500, loss[loss=0.1957, simple_loss=0.2645, pruned_loss=0.06349, over 4812.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2439, pruned_loss=0.04931, over 954754.40 frames. ], batch size: 33, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:04:54,546 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3122, 1.5900, 1.4078, 1.7601, 1.6830, 2.0457, 1.4643, 3.3901],
+ device='cuda:0'), covar=tensor([0.0559, 0.0736, 0.0717, 0.1066, 0.0565, 0.0543, 0.0684, 0.0150],
+ device='cuda:0'), in_proj_covar=tensor([0.0038, 0.0038, 0.0039, 0.0042, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 21:05:26,322 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0531, 2.6542, 1.1390, 1.5834, 2.2015, 1.2176, 3.5317, 1.9315],
+ device='cuda:0'), covar=tensor([0.0661, 0.0641, 0.0751, 0.1158, 0.0449, 0.0996, 0.0199, 0.0556],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0048, 0.0051, 0.0072, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 21:05:29,627 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.760e+01 1.501e+02 1.833e+02 2.147e+02 4.216e+02, threshold=3.666e+02, percent-clipped=1.0
+2023-04-27 21:05:29,848 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-04-27 21:05:58,030 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136286.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:05:58,528 INFO [finetune.py:976] (0/7) Epoch 24, batch 4550, loss[loss=0.1943, simple_loss=0.2762, pruned_loss=0.05618, over 4817.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2444, pruned_loss=0.04905, over 954973.77 frames. ], batch size: 47, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:06:56,730 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=136334.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:06:57,431 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136335.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:07:04,272 INFO [finetune.py:976] (0/7) Epoch 24, batch 4600, loss[loss=0.1893, simple_loss=0.2644, pruned_loss=0.05706, over 4886.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2441, pruned_loss=0.04899, over 953872.26 frames. ], batch size: 35, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:07:04,987 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136338.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:07:18,413 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136350.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:07:40,419 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.579e+02 1.858e+02 2.127e+02 3.942e+02, threshold=3.716e+02, percent-clipped=1.0
+2023-04-27 21:08:03,895 INFO [finetune.py:976] (0/7) Epoch 24, batch 4650, loss[loss=0.1886, simple_loss=0.2534, pruned_loss=0.06193, over 4872.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2427, pruned_loss=0.04905, over 954031.21 frames. ], batch size: 34, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:08:20,793 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136396.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:08:25,717 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5883, 1.6705, 1.4644, 1.1471, 1.2857, 1.2204, 1.4905, 1.1880],
+ device='cuda:0'), covar=tensor([0.1725, 0.1502, 0.1546, 0.1738, 0.2279, 0.2045, 0.1047, 0.2024],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0210, 0.0168, 0.0204, 0.0199, 0.0184, 0.0156, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 21:09:13,811 INFO [finetune.py:976] (0/7) Epoch 24, batch 4700, loss[loss=0.1421, simple_loss=0.2157, pruned_loss=0.03423, over 4863.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2406, pruned_loss=0.04863, over 953319.00 frames. ], batch size: 49, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:09:45,097 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.527e+02 1.865e+02 2.204e+02 5.324e+02, threshold=3.729e+02, percent-clipped=2.0
+2023-04-27 21:09:58,843 INFO [finetune.py:976] (0/7) Epoch 24, batch 4750, loss[loss=0.1406, simple_loss=0.2131, pruned_loss=0.03406, over 4816.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2389, pruned_loss=0.04803, over 954450.34 frames. ], batch size: 38, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:10:07,130 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136498.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:10:47,890 INFO [finetune.py:976] (0/7) Epoch 24, batch 4800, loss[loss=0.1918, simple_loss=0.2751, pruned_loss=0.05422, over 4847.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2399, pruned_loss=0.04812, over 954633.05 frames. ], batch size: 49, lr: 3.04e-03, grad_scale: 32.0
+2023-04-27 21:11:22,758 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.4091, 3.4261, 2.6087, 3.9894, 3.4627, 3.3801, 1.6567, 3.2975],
+ device='cuda:0'), covar=tensor([0.1901, 0.1471, 0.4333, 0.2145, 0.3047, 0.2017, 0.5702, 0.2855],
+ device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0222, 0.0256, 0.0311, 0.0300, 0.0251, 0.0278, 0.0275],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 21:11:29,602 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.193e+02 1.561e+02 1.788e+02 2.083e+02 3.546e+02, threshold=3.576e+02, percent-clipped=0.0
+2023-04-27 21:11:50,702 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4182, 1.6585, 1.9054, 1.9758, 1.8726, 1.8932, 1.8851, 1.9190],
+ device='cuda:0'), covar=tensor([0.3600, 0.5161, 0.4342, 0.4313, 0.5183, 0.6479, 0.4792, 0.4549],
+ device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0371, 0.0323, 0.0337, 0.0345, 0.0392, 0.0354, 0.0328],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 21:11:53,501 INFO [finetune.py:976] (0/7) Epoch 24, batch 4850, loss[loss=0.1491, simple_loss=0.2145, pruned_loss=0.04186, over 4736.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2424, pruned_loss=0.04839, over 953859.18 frames. ], batch size: 23, lr: 3.04e-03, grad_scale: 64.0
+2023-04-27 21:12:26,524 INFO [finetune.py:976] (0/7) Epoch 24, batch 4900, loss[loss=0.1934, simple_loss=0.2712, pruned_loss=0.05779, over 4904.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2432, pruned_loss=0.04823, over 951498.15 frames. ], batch size: 36, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:12:27,242 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136638.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:12:30,260 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136643.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:12:30,312 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6564, 0.7301, 1.4590, 1.9721, 1.7156, 1.5353, 1.5254, 1.5254],
+ device='cuda:0'), covar=tensor([0.4104, 0.6655, 0.5929, 0.5636, 0.5467, 0.6938, 0.6832, 0.8131],
+ device='cuda:0'), in_proj_covar=tensor([0.0437, 0.0421, 0.0512, 0.0509, 0.0467, 0.0500, 0.0503, 0.0517],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 21:12:35,597 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136650.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:12:46,886 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.047e+02 1.604e+02 1.891e+02 2.229e+02 4.573e+02, threshold=3.781e+02, percent-clipped=1.0
+2023-04-27 21:12:58,277 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=136686.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:12:58,835 INFO [finetune.py:976] (0/7) Epoch 24, batch 4950, loss[loss=0.1303, simple_loss=0.1978, pruned_loss=0.03142, over 4080.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2436, pruned_loss=0.04799, over 950795.30 frames. ], batch size: 17, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:13:00,661 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.72 vs. limit=5.0
+2023-04-27 21:13:02,151 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136691.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:13:06,949 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=136698.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:13:11,666 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136704.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:13:23,631 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-27 21:13:29,019 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1154, 2.6305, 1.1367, 1.5641, 2.0621, 1.3801, 3.6408, 1.9340],
+ device='cuda:0'), covar=tensor([0.0656, 0.0669, 0.0792, 0.1235, 0.0474, 0.0924, 0.0257, 0.0549],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0048, 0.0051, 0.0072, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0007, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 21:13:32,451 INFO [finetune.py:976] (0/7) Epoch 24, batch 5000, loss[loss=0.1536, simple_loss=0.2256, pruned_loss=0.04083, over 4926.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2427, pruned_loss=0.0481, over 948892.35 frames. ], batch size: 38, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:13:53,643 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.571e+02 1.815e+02 2.179e+02 3.304e+02, threshold=3.630e+02, percent-clipped=0.0
+2023-04-27 21:14:05,877 INFO [finetune.py:976] (0/7) Epoch 24, batch 5050, loss[loss=0.1685, simple_loss=0.2446, pruned_loss=0.04619, over 4905.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2406, pruned_loss=0.04758, over 948667.10 frames. ], batch size: 37, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:14:17,190 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0
+2023-04-27 21:14:18,828 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136798.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:14:29,866 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=136806.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:15:01,715 INFO [finetune.py:976] (0/7) Epoch 24, batch 5100, loss[loss=0.1316, simple_loss=0.2079, pruned_loss=0.02764, over 4903.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2371, pruned_loss=0.04642, over 951348.11 frames. ], batch size: 36, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:15:07,736 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=136846.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:15:20,880 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0
+2023-04-27 21:15:22,612 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=136867.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:15:23,110 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.537e+02 1.873e+02 2.196e+02 3.846e+02, threshold=3.746e+02, percent-clipped=1.0
+2023-04-27 21:15:31,064 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9175, 1.1619, 1.6582, 1.7603, 1.7150, 1.7991, 1.6716, 1.6484],
+ device='cuda:0'), covar=tensor([0.3765, 0.4726, 0.4067, 0.4076, 0.5252, 0.6751, 0.4307, 0.4096],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0373, 0.0325, 0.0340, 0.0348, 0.0394, 0.0358, 0.0331],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 21:15:34,085 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6729, 1.2758, 1.3377, 1.4012, 1.8420, 1.4593, 1.2200, 1.2838],
+ device='cuda:0'), covar=tensor([0.1526, 0.1332, 0.1734, 0.1181, 0.0660, 0.1386, 0.1957, 0.2089],
+ device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0306, 0.0348, 0.0282, 0.0324, 0.0302, 0.0296, 0.0370],
+ device='cuda:0'), out_proj_covar=tensor([6.3718e-05, 6.2945e-05, 7.3268e-05, 5.6538e-05, 6.6593e-05, 6.3187e-05,
+ 6.1567e-05, 7.8351e-05], device='cuda:0')
+2023-04-27 21:15:35,187 INFO [finetune.py:976] (0/7) Epoch 24, batch 5150, loss[loss=0.1892, simple_loss=0.2695, pruned_loss=0.05446, over 4758.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2372, pruned_loss=0.04642, over 952098.46 frames. ], batch size: 59, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:16:17,510 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2537, 1.2036, 1.2699, 1.4405, 1.5585, 1.2684, 0.9123, 1.3887],
+ device='cuda:0'), covar=tensor([0.0808, 0.1286, 0.0903, 0.0592, 0.0640, 0.0788, 0.0768, 0.0570],
+ device='cuda:0'), in_proj_covar=tensor([0.0188, 0.0204, 0.0187, 0.0175, 0.0179, 0.0180, 0.0152, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 21:16:29,340 INFO [finetune.py:976] (0/7) Epoch 24, batch 5200, loss[loss=0.1827, simple_loss=0.2622, pruned_loss=0.0516, over 4910.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2413, pruned_loss=0.04785, over 952594.85 frames. ], batch size: 37, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:16:59,706 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.230e+02 1.748e+02 2.026e+02 2.510e+02 4.483e+02, threshold=4.051e+02, percent-clipped=3.0
+2023-04-27 21:17:12,983 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5999, 1.6380, 0.6231, 1.2987, 1.6440, 1.4602, 1.3651, 1.4131],
+ device='cuda:0'), covar=tensor([0.0485, 0.0339, 0.0356, 0.0518, 0.0276, 0.0500, 0.0447, 0.0549],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 21:17:22,716 INFO [finetune.py:976] (0/7) Epoch 24, batch 5250, loss[loss=0.1533, simple_loss=0.2355, pruned_loss=0.03555, over 4759.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2442, pruned_loss=0.04851, over 952308.39 frames. ], batch size: 27, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:17:25,230 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=136991.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:17:36,060 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=136999.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:17:59,132 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9058, 2.4353, 1.8862, 1.7160, 1.3030, 1.4017, 2.0241, 1.3072],
+ device='cuda:0'), covar=tensor([0.1662, 0.1384, 0.1428, 0.1808, 0.2350, 0.1987, 0.0927, 0.2110],
+ device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0211, 0.0169, 0.0205, 0.0200, 0.0186, 0.0157, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 21:18:00,483 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-04-27 21:18:10,123 INFO [finetune.py:976] (0/7) Epoch 24, batch 5300, loss[loss=0.1659, simple_loss=0.2443, pruned_loss=0.04373, over 4879.00 frames. ], tot_loss[loss=0.173, simple_loss=0.2466, pruned_loss=0.04972, over 951768.33 frames. ], batch size: 31, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:18:11,864 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=137039.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:18:19,422 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0
+2023-04-27 21:18:30,934 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.102e+02 1.584e+02 1.901e+02 2.207e+02 4.060e+02, threshold=3.801e+02, percent-clipped=1.0
+2023-04-27 21:18:41,671 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6939, 4.0537, 0.8676, 2.1626, 2.2113, 2.7509, 2.3170, 1.1030],
+ device='cuda:0'), covar=tensor([0.1338, 0.0949, 0.2099, 0.1172, 0.0971, 0.1020, 0.1473, 0.2061],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0236, 0.0135, 0.0120, 0.0131, 0.0151, 0.0117, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 21:18:44,060 INFO [finetune.py:976] (0/7) Epoch 24, batch 5350, loss[loss=0.1092, simple_loss=0.1776, pruned_loss=0.02038, over 4417.00 frames. ], tot_loss[loss=0.1735, simple_loss=0.2472, pruned_loss=0.04995, over 950289.10 frames. ], batch size: 19, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:18:49,518 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3835, 3.2467, 0.9730, 1.7902, 1.8243, 2.4413, 1.8721, 1.1012],
+ device='cuda:0'), covar=tensor([0.1306, 0.0775, 0.1930, 0.1173, 0.1009, 0.0933, 0.1394, 0.1987],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0236, 0.0135, 0.0120, 0.0131, 0.0151, 0.0117, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 21:19:01,534 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.29 vs. limit=5.0
+2023-04-27 21:19:03,688 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4357, 0.9528, 0.5903, 1.1804, 1.0528, 1.3300, 1.2587, 1.2333],
+ device='cuda:0'), covar=tensor([0.0487, 0.0385, 0.0349, 0.0550, 0.0306, 0.0473, 0.0460, 0.0558],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 21:19:16,914 INFO [finetune.py:976] (0/7) Epoch 24, batch 5400, loss[loss=0.2021, simple_loss=0.2709, pruned_loss=0.0667, over 4889.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.2443, pruned_loss=0.04923, over 951684.34 frames. ], batch size: 32, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:19:33,099 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=137162.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:19:37,581 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.430e+02 1.764e+02 2.146e+02 4.768e+02, threshold=3.527e+02, percent-clipped=1.0
+2023-04-27 21:19:50,717 INFO [finetune.py:976] (0/7) Epoch 24, batch 5450, loss[loss=0.1731, simple_loss=0.2356, pruned_loss=0.05528, over 4834.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2407, pruned_loss=0.04778, over 952490.60 frames. ], batch size: 30, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:20:23,615 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2996, 1.1891, 1.5622, 1.5385, 1.2500, 1.0367, 1.1522, 0.6630],
+ device='cuda:0'), covar=tensor([0.0541, 0.0614, 0.0409, 0.0628, 0.0645, 0.1328, 0.0600, 0.0680],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0068, 0.0075, 0.0095, 0.0073, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 21:20:24,728 INFO [finetune.py:976] (0/7) Epoch 24, batch 5500, loss[loss=0.2112, simple_loss=0.2594, pruned_loss=0.08143, over 4522.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2374, pruned_loss=0.04711, over 952382.35 frames. ], batch size: 20, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:20:44,122 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.686e+01 1.504e+02 1.776e+02 2.176e+02 3.969e+02, threshold=3.551e+02, percent-clipped=4.0
+2023-04-27 21:20:57,635 INFO [finetune.py:976] (0/7) Epoch 24, batch 5550, loss[loss=0.1966, simple_loss=0.2811, pruned_loss=0.05604, over 4832.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2412, pruned_loss=0.04939, over 951661.40 frames. ], batch size: 40, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:21:05,052 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137299.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:21:25,679 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3063, 1.1177, 3.7705, 3.5406, 3.3395, 3.5955, 3.5811, 3.2960],
+ device='cuda:0'), covar=tensor([0.6927, 0.6072, 0.1076, 0.1564, 0.1152, 0.2058, 0.2024, 0.1642],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0305, 0.0404, 0.0405, 0.0348, 0.0407, 0.0317, 0.0365],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 21:21:34,762 INFO [finetune.py:976] (0/7) Epoch 24, batch 5600, loss[loss=0.1523, simple_loss=0.2362, pruned_loss=0.0342, over 4830.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2438, pruned_loss=0.04946, over 952212.51 frames. ], batch size: 47, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:21:46,036 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=137347.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:22:14,773 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.541e+02 1.789e+02 2.150e+02 5.679e+02, threshold=3.578e+02, percent-clipped=2.0
+2023-04-27 21:22:16,098 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0714, 2.3676, 2.1147, 2.3584, 1.6879, 2.0069, 2.1565, 1.6500],
+ device='cuda:0'), covar=tensor([0.1181, 0.0850, 0.0669, 0.0724, 0.2832, 0.0904, 0.1279, 0.1859],
+ device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0298, 0.0214, 0.0275, 0.0311, 0.0254, 0.0249, 0.0263],
+ device='cuda:0'), out_proj_covar=tensor([1.1323e-04, 1.1747e-04, 8.4193e-05, 1.0841e-04, 1.2539e-04, 9.9963e-05,
+ 1.0035e-04, 1.0390e-04], device='cuda:0')
+2023-04-27 21:22:37,497 INFO [finetune.py:976] (0/7) Epoch 24, batch 5650, loss[loss=0.1847, simple_loss=0.2572, pruned_loss=0.05608, over 4825.00 frames. ], tot_loss[loss=0.1723, simple_loss=0.2451, pruned_loss=0.04978, over 951599.43 frames. ], batch size: 33, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:22:41,151 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-27 21:23:20,647 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4703, 2.5478, 2.1788, 2.1753, 2.3798, 1.8727, 3.1463, 1.6991],
+ device='cuda:0'), covar=tensor([0.3377, 0.1763, 0.3930, 0.2484, 0.1834, 0.2848, 0.1025, 0.4343],
+ device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0350, 0.0424, 0.0348, 0.0376, 0.0374, 0.0365, 0.0420],
+ device='cuda:0'), out_proj_covar=tensor([9.9757e-05, 1.0445e-04, 1.2845e-04, 1.0461e-04, 1.1158e-04, 1.1120e-04,
+ 1.0724e-04, 1.2624e-04], device='cuda:0')
+2023-04-27 21:23:28,789 INFO [finetune.py:976] (0/7) Epoch 24, batch 5700, loss[loss=0.1459, simple_loss=0.2061, pruned_loss=0.04281, over 4156.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2406, pruned_loss=0.04844, over 936073.84 frames. ], batch size: 18, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:23:43,807 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=137462.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:23:45,808 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-24.pt
+2023-04-27 21:23:57,746 INFO [finetune.py:976] (0/7) Epoch 25, batch 0, loss[loss=0.1472, simple_loss=0.2292, pruned_loss=0.03265, over 4828.00 frames. ], tot_loss[loss=0.1472, simple_loss=0.2292, pruned_loss=0.03265, over 4828.00 frames. ], batch size: 30, lr: 3.03e-03, grad_scale: 32.0
+2023-04-27 21:23:57,747 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 21:24:03,407 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3784, 3.4856, 2.5474, 3.8542, 3.5347, 3.4531, 1.5769, 3.3773],
+ device='cuda:0'), covar=tensor([0.1763, 0.1411, 0.2977, 0.2118, 0.2789, 0.1752, 0.5324, 0.2429],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0220, 0.0253, 0.0308, 0.0299, 0.0248, 0.0277, 0.0273],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 21:24:08,111 INFO [finetune.py:1010] (0/7) Epoch 25, validation: loss=0.155, simple_loss=0.224, pruned_loss=0.04295, over 2265189.00 frames.
+2023-04-27 21:24:08,112 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 21:24:09,917 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.060e+01 1.467e+02 1.816e+02 2.337e+02 4.208e+02, threshold=3.632e+02, percent-clipped=2.0
+2023-04-27 21:24:37,393 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=137510.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 21:24:40,447 INFO [finetune.py:976] (0/7) Epoch 25, batch 50, loss[loss=0.1568, simple_loss=0.2263, pruned_loss=0.04366, over 4815.00 frames. ], tot_loss[loss=0.1754, simple_loss=0.2474, pruned_loss=0.05167, over 213717.22 frames. ], batch size: 33, lr: 3.02e-03, grad_scale: 32.0
+2023-04-27 21:24:41,795 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7515, 1.7544, 0.9071, 1.5091, 1.9131, 1.5816, 1.5197, 1.6605],
+ device='cuda:0'), covar=tensor([0.0450, 0.0356, 0.0317, 0.0533, 0.0245, 0.0491, 0.0485, 0.0525],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0038, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 21:24:44,132 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.18 vs. limit=5.0
+2023-04-27 21:24:55,465 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1375, 2.5980, 1.0033, 1.4609, 2.0462, 1.2166, 3.4963, 1.8145],
+ device='cuda:0'), covar=tensor([0.0643, 0.0559, 0.0767, 0.1230, 0.0480, 0.1001, 0.0220, 0.0578],
+ device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0063, 0.0047, 0.0046, 0.0048, 0.0051, 0.0072, 0.0050],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0009, 0.0007, 0.0007, 0.0007, 0.0008, 0.0010, 0.0007],
+ device='cuda:0')
+2023-04-27 21:24:57,769 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5477, 4.0785, 0.8445, 2.1186, 2.0750, 2.7409, 2.4465, 0.9607],
+ device='cuda:0'), covar=tensor([0.1830, 0.1559, 0.2558, 0.1671, 0.1356, 0.1372, 0.1667, 0.2440],
+ device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0236, 0.0135, 0.0119, 0.0131, 0.0151, 0.0117, 0.0117],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 21:24:59,599 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7164, 1.4613, 1.3595, 1.6734, 1.9165, 1.6222, 1.4689, 1.2356],
+ device='cuda:0'), covar=tensor([0.1609, 0.1432, 0.1750, 0.1134, 0.0867, 0.1431, 0.1958, 0.2169],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0304, 0.0348, 0.0282, 0.0324, 0.0302, 0.0296, 0.0369],
+ device='cuda:0'), out_proj_covar=tensor([6.3359e-05, 6.2663e-05, 7.3322e-05, 5.6520e-05, 6.6681e-05, 6.3163e-05,
+ 6.1477e-05, 7.8147e-05], device='cuda:0')
+2023-04-27 21:25:13,440 INFO [finetune.py:976] (0/7) Epoch 25, batch 100, loss[loss=0.1457, simple_loss=0.2134, pruned_loss=0.03894, over 4841.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2389, pruned_loss=0.04937, over 378264.75 frames. ], batch size: 44, lr: 3.02e-03, grad_scale: 32.0
+2023-04-27 21:25:15,237 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.734e+01 1.567e+02 1.856e+02 2.214e+02 3.687e+02, threshold=3.711e+02, percent-clipped=3.0
+2023-04-27 21:25:46,405 INFO [finetune.py:976] (0/7) Epoch 25, batch 150, loss[loss=0.1875, simple_loss=0.2464, pruned_loss=0.06431, over 4813.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2376, pruned_loss=0.0497, over 506708.87 frames. 
], batch size: 45, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:26:14,479 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0 +2023-04-27 21:26:20,276 INFO [finetune.py:976] (0/7) Epoch 25, batch 200, loss[loss=0.1297, simple_loss=0.2026, pruned_loss=0.02839, over 4000.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2351, pruned_loss=0.04768, over 604532.42 frames. ], batch size: 17, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:26:22,073 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.527e+02 1.776e+02 2.195e+02 3.612e+02, threshold=3.551e+02, percent-clipped=0.0 +2023-04-27 21:27:09,386 INFO [finetune.py:976] (0/7) Epoch 25, batch 250, loss[loss=0.1914, simple_loss=0.2602, pruned_loss=0.06135, over 4927.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2398, pruned_loss=0.04916, over 683145.96 frames. ], batch size: 33, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:27:18,675 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2199, 1.4177, 1.6920, 1.8028, 1.6945, 1.7370, 1.7373, 1.6943], + device='cuda:0'), covar=tensor([0.3631, 0.5132, 0.4004, 0.4094, 0.5147, 0.6723, 0.4690, 0.4515], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0372, 0.0324, 0.0338, 0.0348, 0.0392, 0.0356, 0.0329], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 21:28:04,798 INFO [finetune.py:976] (0/7) Epoch 25, batch 300, loss[loss=0.1786, simple_loss=0.2556, pruned_loss=0.05075, over 4774.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.2452, pruned_loss=0.05019, over 743324.73 frames. ], batch size: 28, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:28:05,549 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7202, 1.3712, 1.8466, 2.2340, 1.8078, 1.6637, 1.7625, 1.7226], + device='cuda:0'), covar=tensor([0.4612, 0.7219, 0.6643, 0.5606, 0.6104, 0.8263, 0.8354, 0.9747], + device='cuda:0'), in_proj_covar=tensor([0.0436, 0.0420, 0.0512, 0.0508, 0.0466, 0.0500, 0.0503, 0.0516], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 21:28:06,622 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.558e+02 1.872e+02 2.368e+02 4.247e+02, threshold=3.743e+02, percent-clipped=2.0 +2023-04-27 21:28:43,570 INFO [finetune.py:976] (0/7) Epoch 25, batch 350, loss[loss=0.2016, simple_loss=0.2662, pruned_loss=0.06852, over 4902.00 frames. ], tot_loss[loss=0.1743, simple_loss=0.2469, pruned_loss=0.05086, over 790918.60 frames. ], batch size: 36, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:29:09,286 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0 +2023-04-27 21:29:21,924 INFO [finetune.py:976] (0/7) Epoch 25, batch 400, loss[loss=0.1394, simple_loss=0.2178, pruned_loss=0.03045, over 4208.00 frames. ], tot_loss[loss=0.1732, simple_loss=0.2464, pruned_loss=0.04999, over 827778.24 frames. 
], batch size: 66, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:29:29,041 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.177e+02 1.617e+02 1.875e+02 2.197e+02 4.707e+02, threshold=3.750e+02, percent-clipped=1.0 +2023-04-27 21:30:01,122 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137892.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:30:02,929 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=137895.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:30:27,176 INFO [finetune.py:976] (0/7) Epoch 25, batch 450, loss[loss=0.1607, simple_loss=0.2348, pruned_loss=0.04332, over 4828.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.2442, pruned_loss=0.049, over 857982.76 frames. ], batch size: 38, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:30:28,482 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4690, 3.0572, 0.9720, 1.7487, 2.3450, 1.5415, 4.4463, 1.9938], + device='cuda:0'), covar=tensor([0.0712, 0.0763, 0.0876, 0.1337, 0.0540, 0.1073, 0.0340, 0.0657], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0048, 0.0051, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0007, 0.0007, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 21:31:27,310 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137953.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:31:33,948 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=137956.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:31:44,821 INFO [finetune.py:976] (0/7) Epoch 25, batch 500, loss[loss=0.1777, simple_loss=0.2576, pruned_loss=0.04891, over 4812.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2422, pruned_loss=0.04829, over 879848.01 frames. ], batch size: 39, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:31:46,605 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.991e+01 1.587e+02 1.886e+02 2.282e+02 3.867e+02, threshold=3.771e+02, percent-clipped=1.0 +2023-04-27 21:32:19,353 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-138000.pt +2023-04-27 21:32:25,634 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0 +2023-04-27 21:32:27,213 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4780, 1.4651, 1.9521, 1.9837, 1.4529, 1.2957, 1.5434, 1.0074], + device='cuda:0'), covar=tensor([0.0637, 0.0665, 0.0354, 0.0587, 0.0797, 0.1112, 0.0654, 0.0590], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0066, 0.0068, 0.0075, 0.0096, 0.0073, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 21:32:30,091 INFO [finetune.py:976] (0/7) Epoch 25, batch 550, loss[loss=0.1742, simple_loss=0.242, pruned_loss=0.05323, over 4787.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2384, pruned_loss=0.04753, over 897881.98 frames. ], batch size: 29, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:33:20,386 INFO [finetune.py:976] (0/7) Epoch 25, batch 600, loss[loss=0.2011, simple_loss=0.2726, pruned_loss=0.06483, over 4817.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2371, pruned_loss=0.04657, over 911902.81 frames. 
], batch size: 38, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:33:22,210 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.469e+02 1.786e+02 2.001e+02 2.950e+02, threshold=3.571e+02, percent-clipped=0.0 +2023-04-27 21:33:39,977 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.7775, 1.6899, 1.6101, 1.2953, 1.8007, 1.4988, 2.2488, 1.4147], + device='cuda:0'), covar=tensor([0.3490, 0.1806, 0.4978, 0.2876, 0.1776, 0.2450, 0.1443, 0.4950], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0351, 0.0428, 0.0351, 0.0379, 0.0377, 0.0368, 0.0421], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 21:33:53,153 INFO [finetune.py:976] (0/7) Epoch 25, batch 650, loss[loss=0.1582, simple_loss=0.224, pruned_loss=0.04618, over 4813.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2406, pruned_loss=0.04744, over 921455.55 frames. ], batch size: 25, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:34:10,468 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8675, 1.4723, 1.4935, 1.5861, 2.0122, 1.6867, 1.4513, 1.4047], + device='cuda:0'), covar=tensor([0.1657, 0.1398, 0.2125, 0.1355, 0.0877, 0.1340, 0.1737, 0.2127], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0306, 0.0349, 0.0283, 0.0324, 0.0303, 0.0297, 0.0370], + device='cuda:0'), out_proj_covar=tensor([6.3625e-05, 6.3003e-05, 7.3522e-05, 5.6712e-05, 6.6559e-05, 6.3385e-05, + 6.1639e-05, 7.8331e-05], device='cuda:0') +2023-04-27 21:34:26,503 INFO [finetune.py:976] (0/7) Epoch 25, batch 700, loss[loss=0.1617, simple_loss=0.2297, pruned_loss=0.04683, over 4760.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2413, pruned_loss=0.04732, over 929026.71 frames. ], batch size: 27, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:34:28,314 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.080e+02 1.605e+02 1.838e+02 2.214e+02 4.494e+02, threshold=3.677e+02, percent-clipped=3.0 +2023-04-27 21:35:14,705 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-27 21:35:26,623 INFO [finetune.py:976] (0/7) Epoch 25, batch 750, loss[loss=0.1401, simple_loss=0.2268, pruned_loss=0.02666, over 4746.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2422, pruned_loss=0.04756, over 935154.15 frames. ], batch size: 27, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:36:09,233 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138248.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:36:16,831 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138251.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:36:25,762 INFO [finetune.py:976] (0/7) Epoch 25, batch 800, loss[loss=0.1644, simple_loss=0.248, pruned_loss=0.04043, over 4865.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2421, pruned_loss=0.04734, over 939363.80 frames. ], batch size: 44, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:36:27,579 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.037e+01 1.577e+02 1.890e+02 2.276e+02 6.092e+02, threshold=3.780e+02, percent-clipped=1.0 +2023-04-27 21:36:28,894 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138270.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:36:41,910 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. 
limit=2.0 +2023-04-27 21:37:09,964 INFO [finetune.py:976] (0/7) Epoch 25, batch 850, loss[loss=0.1699, simple_loss=0.2404, pruned_loss=0.04971, over 4870.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2413, pruned_loss=0.04723, over 944710.05 frames. ], batch size: 34, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:37:14,319 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138322.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:37:15,577 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9022, 2.1802, 2.0884, 2.2916, 2.0695, 2.1328, 2.1541, 2.0907], + device='cuda:0'), covar=tensor([0.4261, 0.6005, 0.4996, 0.4426, 0.5671, 0.7105, 0.6075, 0.5623], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0375, 0.0327, 0.0341, 0.0350, 0.0394, 0.0358, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 21:37:24,957 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138331.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:38:00,618 INFO [finetune.py:976] (0/7) Epoch 25, batch 900, loss[loss=0.1754, simple_loss=0.237, pruned_loss=0.05691, over 4779.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2386, pruned_loss=0.04633, over 948060.31 frames. ], batch size: 28, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:38:02,475 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.023e+01 1.492e+02 1.759e+02 2.095e+02 3.711e+02, threshold=3.518e+02, percent-clipped=0.0 +2023-04-27 21:38:20,779 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7037, 1.6182, 0.9267, 1.3914, 1.8620, 1.5697, 1.4754, 1.5585], + device='cuda:0'), covar=tensor([0.0492, 0.0390, 0.0317, 0.0554, 0.0249, 0.0501, 0.0502, 0.0573], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0050, 0.0050, 0.0052], + device='cuda:0') +2023-04-27 21:38:21,999 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138383.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:38:54,301 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2856, 1.6355, 2.1036, 2.5608, 2.3011, 1.7079, 1.4549, 2.0584], + device='cuda:0'), covar=tensor([0.3807, 0.3949, 0.2003, 0.2834, 0.2715, 0.2981, 0.4475, 0.2134], + device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0246, 0.0228, 0.0314, 0.0222, 0.0234, 0.0228, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 21:39:01,961 INFO [finetune.py:976] (0/7) Epoch 25, batch 950, loss[loss=0.1816, simple_loss=0.2638, pruned_loss=0.04967, over 4829.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2378, pruned_loss=0.04643, over 950446.78 frames. 
], batch size: 39, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:39:13,741 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8895, 1.6071, 2.0572, 2.2694, 1.7465, 1.5244, 1.7063, 1.1414], + device='cuda:0'), covar=tensor([0.0424, 0.0664, 0.0368, 0.0480, 0.0642, 0.1109, 0.0608, 0.0636], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0066, 0.0068, 0.0075, 0.0096, 0.0073, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 21:40:06,147 INFO [finetune.py:976] (0/7) Epoch 25, batch 1000, loss[loss=0.1649, simple_loss=0.2503, pruned_loss=0.03976, over 4812.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2397, pruned_loss=0.04723, over 952595.94 frames. ], batch size: 38, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:40:07,975 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.116e+02 1.516e+02 1.776e+02 2.068e+02 3.891e+02, threshold=3.551e+02, percent-clipped=2.0 +2023-04-27 21:40:15,854 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3752, 1.6409, 1.8097, 1.9512, 1.7624, 1.7717, 1.8662, 1.8074], + device='cuda:0'), covar=tensor([0.3806, 0.5002, 0.3951, 0.3992, 0.5296, 0.6859, 0.4586, 0.4421], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0375, 0.0327, 0.0341, 0.0350, 0.0394, 0.0358, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 21:41:09,819 INFO [finetune.py:976] (0/7) Epoch 25, batch 1050, loss[loss=0.1463, simple_loss=0.2176, pruned_loss=0.03753, over 4158.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.241, pruned_loss=0.04727, over 953568.21 frames. ], batch size: 18, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:41:12,396 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9375, 2.2087, 2.1268, 2.9030, 2.8766, 2.5732, 2.5548, 2.1154], + device='cuda:0'), covar=tensor([0.1494, 0.1591, 0.1565, 0.1267, 0.0920, 0.1450, 0.1896, 0.1932], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0304, 0.0346, 0.0281, 0.0322, 0.0301, 0.0295, 0.0367], + device='cuda:0'), out_proj_covar=tensor([6.3098e-05, 6.2590e-05, 7.2944e-05, 5.6225e-05, 6.6104e-05, 6.3151e-05, + 6.1318e-05, 7.7869e-05], device='cuda:0') +2023-04-27 21:41:52,283 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138548.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:41:54,994 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138551.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:42:14,665 INFO [finetune.py:976] (0/7) Epoch 25, batch 1100, loss[loss=0.2029, simple_loss=0.2716, pruned_loss=0.0671, over 4836.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2437, pruned_loss=0.04826, over 954795.14 frames. ], batch size: 47, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:42:16,467 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.782e+01 1.591e+02 1.867e+02 2.331e+02 5.511e+02, threshold=3.734e+02, percent-clipped=3.0 +2023-04-27 21:42:55,895 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=138596.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:42:57,699 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=138599.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:43:18,596 INFO [finetune.py:976] (0/7) Epoch 25, batch 1150, loss[loss=0.1897, simple_loss=0.261, pruned_loss=0.05921, over 4910.00 frames. 
], tot_loss[loss=0.1713, simple_loss=0.2451, pruned_loss=0.04871, over 954181.38 frames. ], batch size: 33, lr: 3.02e-03, grad_scale: 32.0 +2023-04-27 21:43:38,274 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138626.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:44:21,148 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-27 21:44:23,366 INFO [finetune.py:976] (0/7) Epoch 25, batch 1200, loss[loss=0.1743, simple_loss=0.2533, pruned_loss=0.0477, over 4822.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2431, pruned_loss=0.04768, over 954457.33 frames. ], batch size: 33, lr: 3.02e-03, grad_scale: 64.0 +2023-04-27 21:44:26,070 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.080e+02 1.537e+02 1.836e+02 2.355e+02 3.778e+02, threshold=3.672e+02, percent-clipped=1.0 +2023-04-27 21:44:34,556 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-27 21:44:43,174 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=138678.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:44:44,976 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138681.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:44:53,115 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138686.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:45:04,520 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8322, 1.4958, 1.4490, 1.5577, 1.9935, 1.6131, 1.4458, 1.3425], + device='cuda:0'), covar=tensor([0.1677, 0.1235, 0.1747, 0.1349, 0.0730, 0.1611, 0.1834, 0.2208], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0307, 0.0350, 0.0284, 0.0325, 0.0305, 0.0299, 0.0371], + device='cuda:0'), out_proj_covar=tensor([6.3804e-05, 6.3071e-05, 7.3778e-05, 5.6973e-05, 6.6833e-05, 6.3873e-05, + 6.2012e-05, 7.8693e-05], device='cuda:0') +2023-04-27 21:45:05,115 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138697.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:45:26,957 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1197, 1.9063, 2.3139, 2.5610, 2.1528, 2.0686, 2.2443, 2.1998], + device='cuda:0'), covar=tensor([0.4748, 0.7285, 0.7289, 0.5223, 0.6094, 0.8589, 0.8518, 0.8454], + device='cuda:0'), in_proj_covar=tensor([0.0437, 0.0421, 0.0512, 0.0507, 0.0466, 0.0501, 0.0502, 0.0516], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 21:45:29,240 INFO [finetune.py:976] (0/7) Epoch 25, batch 1250, loss[loss=0.1696, simple_loss=0.2264, pruned_loss=0.0564, over 4868.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2412, pruned_loss=0.04817, over 955227.36 frames. 
], batch size: 31, lr: 3.02e-03, grad_scale: 64.0 +2023-04-27 21:45:48,015 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8405, 1.8428, 0.9720, 1.5354, 1.8613, 1.6715, 1.6154, 1.6898], + device='cuda:0'), covar=tensor([0.0452, 0.0345, 0.0328, 0.0506, 0.0247, 0.0458, 0.0435, 0.0503], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0038, 0.0053, 0.0039, 0.0050, 0.0050, 0.0052], + device='cuda:0') +2023-04-27 21:46:02,411 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138742.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:46:11,058 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138747.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:46:21,267 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 21:46:24,000 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138758.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:46:33,341 INFO [finetune.py:976] (0/7) Epoch 25, batch 1300, loss[loss=0.1752, simple_loss=0.2389, pruned_loss=0.05574, over 4914.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2389, pruned_loss=0.04791, over 954988.61 frames. ], batch size: 36, lr: 3.02e-03, grad_scale: 64.0 +2023-04-27 21:46:40,296 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 1.540e+02 1.806e+02 2.216e+02 3.604e+02, threshold=3.612e+02, percent-clipped=0.0 +2023-04-27 21:47:44,176 INFO [finetune.py:976] (0/7) Epoch 25, batch 1350, loss[loss=0.1853, simple_loss=0.2684, pruned_loss=0.05114, over 4938.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2387, pruned_loss=0.0478, over 953956.68 frames. ], batch size: 38, lr: 3.02e-03, grad_scale: 64.0 +2023-04-27 21:48:48,892 INFO [finetune.py:976] (0/7) Epoch 25, batch 1400, loss[loss=0.2006, simple_loss=0.2592, pruned_loss=0.07102, over 4776.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2406, pruned_loss=0.04833, over 952644.99 frames. ], batch size: 26, lr: 3.02e-03, grad_scale: 64.0 +2023-04-27 21:48:50,719 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.518e+02 1.853e+02 2.271e+02 4.217e+02, threshold=3.705e+02, percent-clipped=3.0 +2023-04-27 21:49:44,978 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=138909.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:49:54,328 INFO [finetune.py:976] (0/7) Epoch 25, batch 1450, loss[loss=0.1838, simple_loss=0.2512, pruned_loss=0.05826, over 4813.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2432, pruned_loss=0.04869, over 953723.42 frames. 
], batch size: 38, lr: 3.01e-03, grad_scale: 64.0 +2023-04-27 21:50:07,855 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138926.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:50:39,507 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3291, 1.3410, 1.4257, 1.5508, 1.5879, 1.2804, 0.8949, 1.4636], + device='cuda:0'), covar=tensor([0.0779, 0.1264, 0.0842, 0.0587, 0.0697, 0.0802, 0.0862, 0.0605], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0200, 0.0185, 0.0172, 0.0178, 0.0176, 0.0150, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 21:51:01,101 INFO [finetune.py:976] (0/7) Epoch 25, batch 1500, loss[loss=0.1952, simple_loss=0.2643, pruned_loss=0.06308, over 4817.00 frames. ], tot_loss[loss=0.1712, simple_loss=0.2446, pruned_loss=0.04888, over 953187.98 frames. ], batch size: 38, lr: 3.01e-03, grad_scale: 64.0 +2023-04-27 21:51:03,880 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.504e+02 1.740e+02 2.143e+02 4.195e+02, threshold=3.481e+02, percent-clipped=2.0 +2023-04-27 21:51:10,080 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=138970.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 21:51:12,527 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=138974.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:51:15,007 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=138978.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:51:34,810 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7612, 2.5191, 1.8442, 2.0356, 1.5313, 1.5149, 1.8592, 1.4563], + device='cuda:0'), covar=tensor([0.1674, 0.1450, 0.1570, 0.1573, 0.2260, 0.2052, 0.0935, 0.1995], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0209, 0.0168, 0.0203, 0.0199, 0.0186, 0.0155, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 21:52:02,849 INFO [finetune.py:976] (0/7) Epoch 25, batch 1550, loss[loss=0.1381, simple_loss=0.208, pruned_loss=0.03412, over 4927.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2439, pruned_loss=0.04867, over 952403.01 frames. ], batch size: 42, lr: 3.01e-03, grad_scale: 64.0 +2023-04-27 21:52:11,150 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=139026.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:52:18,844 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139037.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:52:22,327 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139042.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:52:29,086 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139053.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:52:41,926 INFO [finetune.py:976] (0/7) Epoch 25, batch 1600, loss[loss=0.2203, simple_loss=0.2869, pruned_loss=0.07684, over 4252.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2411, pruned_loss=0.0482, over 953171.39 frames. 
], batch size: 65, lr: 3.01e-03, grad_scale: 64.0 +2023-04-27 21:52:44,232 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.576e+02 1.832e+02 2.139e+02 4.092e+02, threshold=3.665e+02, percent-clipped=3.0 +2023-04-27 21:53:26,148 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.17 vs. limit=5.0 +2023-04-27 21:53:47,324 INFO [finetune.py:976] (0/7) Epoch 25, batch 1650, loss[loss=0.1447, simple_loss=0.2126, pruned_loss=0.03837, over 4832.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2374, pruned_loss=0.04669, over 952514.79 frames. ], batch size: 25, lr: 3.01e-03, grad_scale: 64.0 +2023-04-27 21:54:01,792 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4118, 1.6433, 1.8481, 1.9585, 1.8612, 1.9304, 1.8805, 1.8426], + device='cuda:0'), covar=tensor([0.3852, 0.5809, 0.4533, 0.4520, 0.5579, 0.6913, 0.5704, 0.4766], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0375, 0.0326, 0.0340, 0.0349, 0.0393, 0.0358, 0.0331], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 21:54:33,489 INFO [finetune.py:976] (0/7) Epoch 25, batch 1700, loss[loss=0.1754, simple_loss=0.2334, pruned_loss=0.05869, over 4901.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2353, pruned_loss=0.04573, over 953268.23 frames. ], batch size: 32, lr: 3.01e-03, grad_scale: 64.0 +2023-04-27 21:54:35,337 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.004e+02 1.477e+02 1.739e+02 2.189e+02 4.097e+02, threshold=3.477e+02, percent-clipped=1.0 +2023-04-27 21:54:36,778 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-27 21:55:07,049 INFO [finetune.py:976] (0/7) Epoch 25, batch 1750, loss[loss=0.181, simple_loss=0.2627, pruned_loss=0.04968, over 4751.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2382, pruned_loss=0.04679, over 953727.64 frames. ], batch size: 54, lr: 3.01e-03, grad_scale: 64.0 +2023-04-27 21:55:23,347 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139238.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:55:39,583 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-27 21:55:41,181 INFO [finetune.py:976] (0/7) Epoch 25, batch 1800, loss[loss=0.1464, simple_loss=0.2339, pruned_loss=0.02945, over 4818.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2401, pruned_loss=0.04699, over 951849.02 frames. ], batch size: 40, lr: 3.01e-03, grad_scale: 64.0 +2023-04-27 21:55:41,252 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139265.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 21:55:42,989 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.657e+01 1.551e+02 1.902e+02 2.363e+02 3.513e+02, threshold=3.803e+02, percent-clipped=1.0 +2023-04-27 21:55:48,016 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139276.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:55:57,461 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139289.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:56:04,586 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139299.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:56:14,572 INFO [finetune.py:976] (0/7) Epoch 25, batch 1850, loss[loss=0.2045, simple_loss=0.2835, pruned_loss=0.06275, over 4818.00 frames. 
], tot_loss[loss=0.1688, simple_loss=0.2425, pruned_loss=0.04759, over 954717.42 frames. ], batch size: 40, lr: 3.01e-03, grad_scale: 64.0 +2023-04-27 21:56:16,535 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5409, 1.0606, 0.4429, 1.2126, 1.1686, 1.3873, 1.3240, 1.3173], + device='cuda:0'), covar=tensor([0.0478, 0.0399, 0.0393, 0.0552, 0.0288, 0.0500, 0.0488, 0.0552], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0046, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052], + device='cuda:0') +2023-04-27 21:56:29,233 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139337.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:56:29,260 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139337.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 21:56:33,182 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139342.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:56:43,884 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139350.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:56:51,924 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139353.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:57:05,213 INFO [finetune.py:976] (0/7) Epoch 25, batch 1900, loss[loss=0.1738, simple_loss=0.2538, pruned_loss=0.04685, over 4787.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2435, pruned_loss=0.04821, over 952606.07 frames. ], batch size: 25, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 21:57:07,684 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.239e+02 1.510e+02 1.790e+02 2.163e+02 4.429e+02, threshold=3.581e+02, percent-clipped=1.0 +2023-04-27 21:57:28,905 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=139385.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:57:36,774 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=139390.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:57:49,497 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=139401.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 21:58:09,750 INFO [finetune.py:976] (0/7) Epoch 25, batch 1950, loss[loss=0.1833, simple_loss=0.2501, pruned_loss=0.05831, over 4814.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2419, pruned_loss=0.04772, over 950540.83 frames. 
], batch size: 25, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 21:58:32,554 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2702, 1.4062, 1.7419, 1.9022, 1.7740, 1.8382, 1.7767, 1.8070], + device='cuda:0'), covar=tensor([0.3681, 0.5070, 0.4004, 0.4072, 0.5215, 0.6678, 0.4731, 0.4334], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0375, 0.0327, 0.0340, 0.0350, 0.0393, 0.0358, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 21:58:33,079 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8990, 3.8444, 3.0022, 4.4586, 3.8175, 3.9127, 1.9528, 3.8626], + device='cuda:0'), covar=tensor([0.1885, 0.1273, 0.3561, 0.1841, 0.3431, 0.1899, 0.5772, 0.2613], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0217, 0.0249, 0.0304, 0.0296, 0.0247, 0.0273, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 21:59:13,359 INFO [finetune.py:976] (0/7) Epoch 25, batch 2000, loss[loss=0.1226, simple_loss=0.193, pruned_loss=0.02612, over 4060.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2395, pruned_loss=0.0469, over 950647.85 frames. ], batch size: 17, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 21:59:15,797 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.572e+02 1.799e+02 2.251e+02 3.942e+02, threshold=3.599e+02, percent-clipped=2.0 +2023-04-27 22:00:17,502 INFO [finetune.py:976] (0/7) Epoch 25, batch 2050, loss[loss=0.1321, simple_loss=0.2036, pruned_loss=0.03028, over 4829.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2367, pruned_loss=0.04622, over 951659.39 frames. ], batch size: 25, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 22:00:37,195 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-27 22:00:41,950 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4049, 3.1128, 1.1082, 1.7431, 1.7155, 2.1665, 1.8055, 1.0278], + device='cuda:0'), covar=tensor([0.1307, 0.0899, 0.1738, 0.1192, 0.1039, 0.1035, 0.1469, 0.1809], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0237, 0.0136, 0.0120, 0.0131, 0.0151, 0.0116, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 22:01:21,673 INFO [finetune.py:976] (0/7) Epoch 25, batch 2100, loss[loss=0.1767, simple_loss=0.2577, pruned_loss=0.04782, over 4807.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2376, pruned_loss=0.04714, over 953055.05 frames. 
], batch size: 45, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 22:01:21,758 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139565.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 22:01:24,121 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.088e+01 1.541e+02 1.795e+02 2.157e+02 3.840e+02, threshold=3.589e+02, percent-clipped=1.0 +2023-04-27 22:01:30,503 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139571.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:01:37,248 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139582.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:01:44,582 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139594.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:01:57,936 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=139613.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:01:59,120 INFO [finetune.py:976] (0/7) Epoch 25, batch 2150, loss[loss=0.1771, simple_loss=0.2622, pruned_loss=0.04601, over 4831.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2406, pruned_loss=0.04796, over 953777.41 frames. ], batch size: 51, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 22:02:10,544 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139632.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 22:02:10,579 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139632.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:02:17,293 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139643.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:02:18,460 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139645.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:02:22,726 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.1572, 2.8263, 3.0548, 3.5091, 3.5216, 2.9899, 2.5222, 3.2964], + device='cuda:0'), covar=tensor([0.0663, 0.0882, 0.0570, 0.0489, 0.0429, 0.0759, 0.0680, 0.0469], + device='cuda:0'), in_proj_covar=tensor([0.0187, 0.0204, 0.0187, 0.0174, 0.0180, 0.0179, 0.0152, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:02:31,939 INFO [finetune.py:976] (0/7) Epoch 25, batch 2200, loss[loss=0.2474, simple_loss=0.3163, pruned_loss=0.0892, over 4893.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2424, pruned_loss=0.04866, over 953148.00 frames. ], batch size: 35, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 22:02:34,823 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.529e+02 1.754e+02 2.258e+02 5.468e+02, threshold=3.509e+02, percent-clipped=4.0 +2023-04-27 22:02:42,095 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6629, 3.0521, 1.0997, 1.8104, 2.4800, 1.6847, 4.3339, 2.2488], + device='cuda:0'), covar=tensor([0.0592, 0.0700, 0.0848, 0.1222, 0.0483, 0.0949, 0.0183, 0.0546], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-27 22:02:50,734 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.52 vs. 
limit=2.0 +2023-04-27 22:02:54,921 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8134, 4.0056, 1.2154, 2.0546, 2.2497, 2.8089, 2.3238, 0.9580], + device='cuda:0'), covar=tensor([0.1288, 0.0908, 0.1717, 0.1200, 0.0941, 0.0960, 0.1419, 0.2003], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0239, 0.0136, 0.0121, 0.0132, 0.0153, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 22:03:04,782 INFO [finetune.py:976] (0/7) Epoch 25, batch 2250, loss[loss=0.1636, simple_loss=0.2502, pruned_loss=0.03852, over 4809.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2441, pruned_loss=0.04875, over 954063.26 frames. ], batch size: 33, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 22:03:08,464 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0748, 1.9252, 2.3497, 2.4634, 2.1260, 2.0357, 2.1408, 2.1794], + device='cuda:0'), covar=tensor([0.4441, 0.6738, 0.6639, 0.5249, 0.5912, 0.8034, 0.8592, 0.8331], + device='cuda:0'), in_proj_covar=tensor([0.0437, 0.0420, 0.0510, 0.0509, 0.0467, 0.0499, 0.0503, 0.0517], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:03:25,282 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8815, 2.4769, 1.9280, 1.9936, 1.5163, 1.5239, 2.0067, 1.4608], + device='cuda:0'), covar=tensor([0.1295, 0.1122, 0.1183, 0.1360, 0.1865, 0.1604, 0.0761, 0.1687], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0210, 0.0168, 0.0204, 0.0199, 0.0186, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 22:03:38,678 INFO [finetune.py:976] (0/7) Epoch 25, batch 2300, loss[loss=0.1723, simple_loss=0.2432, pruned_loss=0.05067, over 4896.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.245, pruned_loss=0.04877, over 953715.48 frames. ], batch size: 36, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 22:03:41,544 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.526e+02 1.763e+02 2.328e+02 5.368e+02, threshold=3.526e+02, percent-clipped=6.0 +2023-04-27 22:03:41,674 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139769.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:03:49,435 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.61 vs. 
limit=2.0 +2023-04-27 22:03:54,770 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139789.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:04:03,997 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139804.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:04:05,250 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139806.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:04:15,400 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2869, 2.1993, 1.8746, 1.8699, 2.1931, 1.8095, 2.6387, 1.6579], + device='cuda:0'), covar=tensor([0.3382, 0.1771, 0.4204, 0.2580, 0.1657, 0.2406, 0.1354, 0.4047], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0354, 0.0428, 0.0353, 0.0382, 0.0376, 0.0370, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:04:23,021 INFO [finetune.py:976] (0/7) Epoch 25, batch 2350, loss[loss=0.1558, simple_loss=0.2363, pruned_loss=0.03764, over 4867.00 frames. ], tot_loss[loss=0.1705, simple_loss=0.2435, pruned_loss=0.04876, over 953132.49 frames. ], batch size: 31, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 22:04:33,543 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.03 vs. limit=5.0 +2023-04-27 22:04:44,813 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139830.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:05:06,869 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=139850.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:05:06,878 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139850.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:05:08,874 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-27 22:05:17,196 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3821, 1.6080, 1.3365, 1.5362, 1.3427, 1.2027, 1.4395, 1.0508], + device='cuda:0'), covar=tensor([0.1549, 0.1156, 0.0851, 0.1147, 0.3409, 0.1217, 0.1608, 0.2091], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0300, 0.0213, 0.0277, 0.0311, 0.0255, 0.0248, 0.0263], + device='cuda:0'), out_proj_covar=tensor([1.1344e-04, 1.1850e-04, 8.4008e-05, 1.0923e-04, 1.2556e-04, 1.0058e-04, + 1.0005e-04, 1.0379e-04], device='cuda:0') +2023-04-27 22:05:17,687 INFO [finetune.py:976] (0/7) Epoch 25, batch 2400, loss[loss=0.1727, simple_loss=0.2367, pruned_loss=0.0544, over 4849.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2411, pruned_loss=0.0483, over 953603.38 frames. 
], batch size: 44, lr: 3.01e-03, grad_scale: 32.0 +2023-04-27 22:05:17,794 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139865.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:05:19,535 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139867.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:05:20,604 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.574e+01 1.558e+02 1.905e+02 2.229e+02 5.557e+02, threshold=3.809e+02, percent-clipped=2.0 +2023-04-27 22:05:23,148 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1151, 2.0271, 2.2737, 2.6270, 2.6191, 2.0115, 1.7464, 2.3768], + device='cuda:0'), covar=tensor([0.0801, 0.1051, 0.0582, 0.0534, 0.0549, 0.0797, 0.0743, 0.0501], + device='cuda:0'), in_proj_covar=tensor([0.0188, 0.0206, 0.0189, 0.0175, 0.0181, 0.0180, 0.0153, 0.0181], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:05:37,249 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139894.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:05:53,347 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=139911.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:05:55,628 INFO [finetune.py:976] (0/7) Epoch 25, batch 2450, loss[loss=0.1897, simple_loss=0.2504, pruned_loss=0.0645, over 4827.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.239, pruned_loss=0.04792, over 952644.07 frames. ], batch size: 30, lr: 3.01e-03, grad_scale: 16.0 +2023-04-27 22:06:04,988 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0 +2023-04-27 22:06:15,191 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139927.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:06:18,764 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139932.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 22:06:27,870 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=139938.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:06:35,922 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=139942.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:06:37,755 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=139945.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:06:59,672 INFO [finetune.py:976] (0/7) Epoch 25, batch 2500, loss[loss=0.1611, simple_loss=0.2378, pruned_loss=0.04219, over 4768.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2398, pruned_loss=0.04826, over 951917.31 frames. ], batch size: 26, lr: 3.01e-03, grad_scale: 16.0 +2023-04-27 22:07:01,150 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. 
limit=2.0 +2023-04-27 22:07:04,983 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.044e+02 1.582e+02 1.792e+02 2.204e+02 3.647e+02, threshold=3.585e+02, percent-clipped=0.0 +2023-04-27 22:07:11,615 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=139980.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:07:21,144 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=139993.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:07:25,477 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-140000.pt +2023-04-27 22:07:35,747 INFO [finetune.py:976] (0/7) Epoch 25, batch 2550, loss[loss=0.2059, simple_loss=0.278, pruned_loss=0.06695, over 4898.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2414, pruned_loss=0.04837, over 950971.80 frames. ], batch size: 43, lr: 3.01e-03, grad_scale: 16.0 +2023-04-27 22:07:49,703 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7698, 1.7127, 0.8167, 1.4024, 1.7400, 1.5833, 1.4686, 1.5117], + device='cuda:0'), covar=tensor([0.0455, 0.0341, 0.0341, 0.0538, 0.0271, 0.0499, 0.0506, 0.0548], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0051, 0.0045, 0.0038, 0.0052, 0.0039, 0.0050, 0.0050, 0.0052], + device='cuda:0') +2023-04-27 22:08:09,494 INFO [finetune.py:976] (0/7) Epoch 25, batch 2600, loss[loss=0.2042, simple_loss=0.2703, pruned_loss=0.06907, over 4851.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2431, pruned_loss=0.04885, over 950917.30 frames. ], batch size: 44, lr: 3.01e-03, grad_scale: 16.0 +2023-04-27 22:08:09,622 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1575, 0.7049, 0.9490, 0.8317, 1.2284, 1.0296, 0.8960, 0.9958], + device='cuda:0'), covar=tensor([0.1656, 0.1585, 0.1860, 0.1439, 0.0986, 0.1400, 0.1682, 0.2123], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0309, 0.0351, 0.0286, 0.0329, 0.0306, 0.0299, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.3815e-05, 6.3671e-05, 7.3925e-05, 5.7355e-05, 6.7640e-05, 6.4118e-05, + 6.1933e-05, 7.9229e-05], device='cuda:0') +2023-04-27 22:08:12,527 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.025e+02 1.514e+02 1.836e+02 2.215e+02 4.899e+02, threshold=3.672e+02, percent-clipped=3.0 +2023-04-27 22:08:13,354 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-27 22:08:33,239 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7401, 1.3100, 1.8353, 2.2224, 1.8346, 1.7131, 1.7835, 1.7065], + device='cuda:0'), covar=tensor([0.4281, 0.7032, 0.6457, 0.5657, 0.5636, 0.7420, 0.8084, 0.9833], + device='cuda:0'), in_proj_covar=tensor([0.0438, 0.0421, 0.0512, 0.0509, 0.0466, 0.0499, 0.0504, 0.0518], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:08:35,644 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140103.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:08:39,899 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140110.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:08:42,878 INFO [finetune.py:976] (0/7) Epoch 25, batch 2650, loss[loss=0.1903, simple_loss=0.2649, pruned_loss=0.05782, over 4899.00 frames. 
], tot_loss[loss=0.1708, simple_loss=0.2443, pruned_loss=0.04864, over 951364.93 frames. ], batch size: 37, lr: 3.01e-03, grad_scale: 16.0 +2023-04-27 22:08:49,429 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140125.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:09:04,018 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140145.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:09:13,151 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140160.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:09:14,361 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140162.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:09:15,625 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140164.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:09:16,092 INFO [finetune.py:976] (0/7) Epoch 25, batch 2700, loss[loss=0.1413, simple_loss=0.223, pruned_loss=0.02976, over 4741.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2427, pruned_loss=0.04754, over 951296.80 frames. ], batch size: 23, lr: 3.01e-03, grad_scale: 16.0 +2023-04-27 22:09:19,142 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.968e+01 1.589e+02 1.838e+02 2.247e+02 3.608e+02, threshold=3.675e+02, percent-clipped=0.0 +2023-04-27 22:09:19,881 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140171.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:09:50,088 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140206.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:10:00,542 INFO [finetune.py:976] (0/7) Epoch 25, batch 2750, loss[loss=0.1295, simple_loss=0.2057, pruned_loss=0.02667, over 4830.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2407, pruned_loss=0.04739, over 953580.36 frames. ], batch size: 30, lr: 3.01e-03, grad_scale: 16.0 +2023-04-27 22:10:18,048 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140227.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:10:32,149 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140238.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:11:02,666 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2143, 2.5584, 2.1785, 2.4396, 1.8219, 2.1719, 2.1104, 1.7511], + device='cuda:0'), covar=tensor([0.1587, 0.0968, 0.0748, 0.1047, 0.3165, 0.0945, 0.1725, 0.2130], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0299, 0.0212, 0.0275, 0.0310, 0.0253, 0.0247, 0.0262], + device='cuda:0'), out_proj_covar=tensor([1.1324e-04, 1.1813e-04, 8.3532e-05, 1.0851e-04, 1.2495e-04, 9.9874e-05, + 9.9577e-05, 1.0353e-04], device='cuda:0') +2023-04-27 22:11:03,253 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140259.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:11:12,261 INFO [finetune.py:976] (0/7) Epoch 25, batch 2800, loss[loss=0.1495, simple_loss=0.2215, pruned_loss=0.03877, over 4834.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2376, pruned_loss=0.04625, over 955618.03 frames. 
], batch size: 30, lr: 3.01e-03, grad_scale: 16.0 +2023-04-27 22:11:15,318 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.076e+02 1.465e+02 1.747e+02 2.235e+02 4.062e+02, threshold=3.495e+02, percent-clipped=1.0 +2023-04-27 22:11:20,361 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140275.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:11:21,682 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0136, 1.9554, 2.4086, 2.6534, 1.8834, 1.6576, 2.0052, 1.0250], + device='cuda:0'), covar=tensor([0.0517, 0.0722, 0.0368, 0.0550, 0.0720, 0.1075, 0.0647, 0.0698], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0068, 0.0074, 0.0095, 0.0073, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 22:11:32,986 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140286.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:12:14,090 INFO [finetune.py:976] (0/7) Epoch 25, batch 2850, loss[loss=0.1706, simple_loss=0.2462, pruned_loss=0.04752, over 4820.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2368, pruned_loss=0.04635, over 953450.67 frames. ], batch size: 45, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:12:17,268 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140320.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:12:48,662 INFO [finetune.py:976] (0/7) Epoch 25, batch 2900, loss[loss=0.2108, simple_loss=0.2903, pruned_loss=0.06561, over 4732.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2395, pruned_loss=0.04702, over 951900.13 frames. ], batch size: 59, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:12:51,721 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.049e+02 1.618e+02 1.938e+02 2.286e+02 3.478e+02, threshold=3.877e+02, percent-clipped=0.0 +2023-04-27 22:13:00,448 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6556, 1.9443, 2.0474, 2.1481, 2.0266, 2.0944, 2.0993, 2.0367], + device='cuda:0'), covar=tensor([0.3444, 0.5211, 0.4185, 0.4174, 0.5272, 0.6967, 0.4879, 0.4703], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0374, 0.0326, 0.0339, 0.0349, 0.0393, 0.0358, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 22:13:02,710 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140387.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:13:20,922 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7630, 2.3964, 1.9586, 1.8084, 1.3303, 1.3563, 1.9933, 1.3222], + device='cuda:0'), covar=tensor([0.1703, 0.1407, 0.1359, 0.1716, 0.2282, 0.1979, 0.0948, 0.2072], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0211, 0.0169, 0.0204, 0.0200, 0.0187, 0.0157, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 22:13:22,623 INFO [finetune.py:976] (0/7) Epoch 25, batch 2950, loss[loss=0.171, simple_loss=0.254, pruned_loss=0.04397, over 4907.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2417, pruned_loss=0.04724, over 952772.00 frames. 
], batch size: 37, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:13:28,752 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140425.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:13:42,312 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140445.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:13:44,119 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140448.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:13:52,226 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140459.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:13:52,884 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140460.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:13:54,551 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140462.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:13:56,277 INFO [finetune.py:976] (0/7) Epoch 25, batch 3000, loss[loss=0.1743, simple_loss=0.2455, pruned_loss=0.05154, over 4859.00 frames. ], tot_loss[loss=0.1696, simple_loss=0.2432, pruned_loss=0.04803, over 954134.72 frames. ], batch size: 31, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:13:56,278 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 22:13:59,128 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3137, 1.1848, 3.8439, 3.5614, 3.4630, 3.7624, 3.8386, 3.3796], + device='cuda:0'), covar=tensor([0.7272, 0.5698, 0.1226, 0.2017, 0.1467, 0.1182, 0.0756, 0.1902], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0308, 0.0409, 0.0412, 0.0349, 0.0414, 0.0319, 0.0370], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:14:01,593 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2422, 1.6022, 1.4143, 1.7713, 1.6732, 1.7383, 1.4442, 3.0823], + device='cuda:0'), covar=tensor([0.0602, 0.0789, 0.0751, 0.1154, 0.0606, 0.0440, 0.0681, 0.0167], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 22:14:07,210 INFO [finetune.py:1010] (0/7) Epoch 25, validation: loss=0.1531, simple_loss=0.2225, pruned_loss=0.04184, over 2265189.00 frames. +2023-04-27 22:14:07,210 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 22:14:07,886 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140466.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:14:10,182 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.175e+02 1.715e+02 2.080e+02 2.596e+02 3.715e+02, threshold=4.161e+02, percent-clipped=0.0 +2023-04-27 22:14:12,094 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140473.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:14:21,876 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140489.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:14:24,277 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140493.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:14:27,448 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. 
limit=2.0 +2023-04-27 22:14:33,150 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140506.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:14:34,800 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140508.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:14:35,488 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140509.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:14:36,041 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140510.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:14:39,577 INFO [finetune.py:976] (0/7) Epoch 25, batch 3050, loss[loss=0.1712, simple_loss=0.246, pruned_loss=0.04822, over 4801.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2446, pruned_loss=0.04798, over 954841.65 frames. ], batch size: 40, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:15:02,553 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140550.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:15:04,911 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140554.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:15:12,551 INFO [finetune.py:976] (0/7) Epoch 25, batch 3100, loss[loss=0.1378, simple_loss=0.2141, pruned_loss=0.03078, over 4776.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2431, pruned_loss=0.04738, over 956857.98 frames. ], batch size: 26, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:15:16,051 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.054e+02 1.508e+02 1.726e+02 2.242e+02 3.598e+02, threshold=3.452e+02, percent-clipped=0.0 +2023-04-27 22:15:16,196 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140570.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:15:21,487 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3690, 1.4118, 3.8588, 3.5808, 3.4123, 3.7603, 3.7540, 3.3605], + device='cuda:0'), covar=tensor([0.6976, 0.5640, 0.1259, 0.2022, 0.1281, 0.1907, 0.1258, 0.1651], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0308, 0.0408, 0.0411, 0.0348, 0.0413, 0.0319, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:16:07,368 INFO [finetune.py:976] (0/7) Epoch 25, batch 3150, loss[loss=0.1226, simple_loss=0.2034, pruned_loss=0.02091, over 4935.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2394, pruned_loss=0.04651, over 957444.44 frames. ], batch size: 38, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:16:07,436 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140615.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:16:19,065 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0079, 2.3562, 2.0482, 2.2163, 1.7378, 1.9832, 1.9969, 1.5995], + device='cuda:0'), covar=tensor([0.1866, 0.1160, 0.0779, 0.1196, 0.3273, 0.1198, 0.2032, 0.2389], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0301, 0.0213, 0.0277, 0.0311, 0.0255, 0.0249, 0.0264], + device='cuda:0'), out_proj_covar=tensor([1.1390e-04, 1.1894e-04, 8.3926e-05, 1.0907e-04, 1.2545e-04, 1.0054e-04, + 1.0041e-04, 1.0440e-04], device='cuda:0') +2023-04-27 22:17:13,351 INFO [finetune.py:976] (0/7) Epoch 25, batch 3200, loss[loss=0.19, simple_loss=0.2557, pruned_loss=0.06213, over 4822.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2367, pruned_loss=0.04601, over 956781.95 frames. 
], batch size: 40, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:17:21,870 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.047e+02 1.528e+02 1.799e+02 2.210e+02 5.000e+02, threshold=3.599e+02, percent-clipped=4.0 +2023-04-27 22:17:34,515 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0731, 1.7429, 2.2182, 2.4789, 2.0997, 1.9996, 2.1276, 2.0397], + device='cuda:0'), covar=tensor([0.4781, 0.7281, 0.6971, 0.5519, 0.6263, 0.8202, 0.8321, 1.0776], + device='cuda:0'), in_proj_covar=tensor([0.0442, 0.0424, 0.0517, 0.0513, 0.0470, 0.0504, 0.0508, 0.0521], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:18:19,768 INFO [finetune.py:976] (0/7) Epoch 25, batch 3250, loss[loss=0.169, simple_loss=0.2408, pruned_loss=0.04863, over 4826.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2379, pruned_loss=0.04706, over 956157.63 frames. ], batch size: 39, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:18:26,958 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140723.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:18:49,238 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.95 vs. limit=5.0 +2023-04-27 22:18:55,996 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140743.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:19:10,849 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140759.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:19:20,223 INFO [finetune.py:976] (0/7) Epoch 25, batch 3300, loss[loss=0.1671, simple_loss=0.2435, pruned_loss=0.04539, over 4826.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2401, pruned_loss=0.04765, over 955965.46 frames. 
], batch size: 40, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:19:20,951 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140766.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:19:27,805 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.140e+02 1.701e+02 1.963e+02 2.275e+02 6.006e+02, threshold=3.926e+02, percent-clipped=4.0 +2023-04-27 22:19:29,805 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3428, 1.8274, 2.3485, 2.6547, 2.2780, 1.7998, 1.5359, 2.0491], + device='cuda:0'), covar=tensor([0.3359, 0.3165, 0.1539, 0.2157, 0.2395, 0.2502, 0.4003, 0.1959], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0244, 0.0225, 0.0311, 0.0220, 0.0232, 0.0225, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 22:19:47,991 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140784.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:20:13,960 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140807.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:20:20,736 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140809.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:20:20,764 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5917, 1.5104, 1.9924, 2.0464, 1.4445, 1.3664, 1.6481, 1.1480], + device='cuda:0'), covar=tensor([0.0659, 0.0650, 0.0331, 0.0612, 0.0777, 0.1106, 0.0605, 0.0577], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0067, 0.0066, 0.0067, 0.0074, 0.0094, 0.0072, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 22:20:23,767 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140814.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:20:24,323 INFO [finetune.py:976] (0/7) Epoch 25, batch 3350, loss[loss=0.1517, simple_loss=0.2386, pruned_loss=0.03241, over 4887.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2429, pruned_loss=0.04863, over 955866.22 frames. ], batch size: 32, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:20:46,731 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140845.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:20:56,115 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.5158, 3.0491, 2.7323, 2.8635, 2.7258, 2.9491, 2.7892, 2.7840], + device='cuda:0'), covar=tensor([0.2801, 0.4723, 0.4043, 0.3906, 0.4824, 0.5628, 0.5287, 0.4620], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0376, 0.0328, 0.0340, 0.0350, 0.0392, 0.0359, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 22:20:59,011 INFO [finetune.py:976] (0/7) Epoch 25, batch 3400, loss[loss=0.1811, simple_loss=0.2583, pruned_loss=0.05194, over 4805.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2438, pruned_loss=0.0488, over 955757.14 frames. 
], batch size: 40, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:20:59,080 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=140865.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:21:02,013 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.526e+02 1.810e+02 2.121e+02 3.949e+02, threshold=3.620e+02, percent-clipped=1.0 +2023-04-27 22:21:02,128 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140870.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:21:11,633 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8165, 1.4520, 2.0376, 2.2382, 1.8896, 1.7910, 1.8928, 1.8453], + device='cuda:0'), covar=tensor([0.3942, 0.6060, 0.5317, 0.4791, 0.5354, 0.7346, 0.7114, 0.7278], + device='cuda:0'), in_proj_covar=tensor([0.0439, 0.0421, 0.0512, 0.0509, 0.0467, 0.0501, 0.0505, 0.0517], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:21:20,449 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.60 vs. limit=5.0 +2023-04-27 22:21:32,399 INFO [finetune.py:976] (0/7) Epoch 25, batch 3450, loss[loss=0.1653, simple_loss=0.2378, pruned_loss=0.04645, over 4777.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2432, pruned_loss=0.04854, over 955529.29 frames. ], batch size: 29, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:21:32,500 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=140915.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:21:33,735 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140917.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:21:54,873 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=140947.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:22:05,093 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=140963.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:22:05,739 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4039, 2.9915, 0.8883, 1.7382, 1.7734, 2.2859, 1.7590, 0.9376], + device='cuda:0'), covar=tensor([0.1367, 0.0925, 0.1954, 0.1267, 0.1052, 0.0906, 0.1516, 0.1960], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0240, 0.0137, 0.0122, 0.0132, 0.0154, 0.0118, 0.0120], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 22:22:06,248 INFO [finetune.py:976] (0/7) Epoch 25, batch 3500, loss[loss=0.198, simple_loss=0.2664, pruned_loss=0.06481, over 4900.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2405, pruned_loss=0.04766, over 956231.06 frames. ], batch size: 36, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:22:09,318 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.074e+02 1.472e+02 1.838e+02 2.111e+02 1.137e+03, threshold=3.676e+02, percent-clipped=2.0 +2023-04-27 22:22:14,756 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=140978.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:22:35,389 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141008.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:22:39,553 INFO [finetune.py:976] (0/7) Epoch 25, batch 3550, loss[loss=0.1353, simple_loss=0.2137, pruned_loss=0.02846, over 4823.00 frames. 
], tot_loss[loss=0.1665, simple_loss=0.2386, pruned_loss=0.04722, over 954050.46 frames. ], batch size: 39, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:22:58,180 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141043.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:23:06,835 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.1416, 4.0072, 2.9664, 4.7481, 4.1028, 4.1351, 1.6722, 3.9789], + device='cuda:0'), covar=tensor([0.1537, 0.1151, 0.2710, 0.1329, 0.3174, 0.1887, 0.5858, 0.2492], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0220, 0.0254, 0.0308, 0.0301, 0.0251, 0.0276, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 22:23:13,442 INFO [finetune.py:976] (0/7) Epoch 25, batch 3600, loss[loss=0.1698, simple_loss=0.2473, pruned_loss=0.04612, over 4908.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2365, pruned_loss=0.04695, over 955017.42 frames. ], batch size: 37, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:23:16,478 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.519e+02 1.759e+02 2.110e+02 6.340e+02, threshold=3.519e+02, percent-clipped=2.0 +2023-04-27 22:23:27,734 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141079.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:23:46,968 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=141091.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:24:19,424 INFO [finetune.py:976] (0/7) Epoch 25, batch 3650, loss[loss=0.2054, simple_loss=0.2816, pruned_loss=0.06464, over 4909.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2377, pruned_loss=0.04716, over 954088.85 frames. ], batch size: 36, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:24:42,746 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141143.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:24:43,937 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141145.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:24:45,807 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2511, 1.6101, 2.1553, 2.4144, 2.1760, 1.6905, 1.3620, 1.8323], + device='cuda:0'), covar=tensor([0.3525, 0.3341, 0.1795, 0.2322, 0.2525, 0.2887, 0.4025, 0.1901], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0246, 0.0228, 0.0313, 0.0221, 0.0234, 0.0227, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 22:24:49,305 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1143, 2.6527, 1.0725, 1.5854, 2.1119, 1.2904, 3.5557, 1.7929], + device='cuda:0'), covar=tensor([0.0658, 0.0660, 0.0780, 0.1159, 0.0482, 0.0939, 0.0268, 0.0580], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 22:24:57,940 INFO [finetune.py:976] (0/7) Epoch 25, batch 3700, loss[loss=0.1767, simple_loss=0.2475, pruned_loss=0.05301, over 4813.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2401, pruned_loss=0.04816, over 954084.32 frames. 
], batch size: 30, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:24:58,017 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141165.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:24:58,034 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141165.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:25:00,977 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.179e+02 1.634e+02 1.921e+02 2.262e+02 4.366e+02, threshold=3.843e+02, percent-clipped=2.0 +2023-04-27 22:25:15,806 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=141193.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:25:15,869 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2550, 2.3702, 1.8161, 1.9818, 2.2861, 1.9091, 2.7674, 1.4446], + device='cuda:0'), covar=tensor([0.4343, 0.1857, 0.5173, 0.3544, 0.2189, 0.3066, 0.1664, 0.5528], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0355, 0.0431, 0.0355, 0.0384, 0.0378, 0.0373, 0.0429], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:25:22,648 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141204.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:25:27,277 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8153, 2.0858, 2.1474, 2.2362, 2.0539, 2.0826, 2.1379, 2.0845], + device='cuda:0'), covar=tensor([0.4262, 0.5900, 0.4791, 0.4582, 0.5883, 0.7243, 0.6159, 0.5741], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0375, 0.0328, 0.0340, 0.0350, 0.0393, 0.0359, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 22:25:30,031 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=141213.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:25:31,150 INFO [finetune.py:976] (0/7) Epoch 25, batch 3750, loss[loss=0.1391, simple_loss=0.2236, pruned_loss=0.02733, over 4864.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2411, pruned_loss=0.04784, over 954552.91 frames. ], batch size: 31, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:26:37,437 INFO [finetune.py:976] (0/7) Epoch 25, batch 3800, loss[loss=0.1795, simple_loss=0.2635, pruned_loss=0.0478, over 4801.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2425, pruned_loss=0.04806, over 955075.50 frames. ], batch size: 39, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:26:45,872 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.561e+02 1.876e+02 2.402e+02 4.277e+02, threshold=3.753e+02, percent-clipped=1.0 +2023-04-27 22:26:47,826 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141273.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:27:28,695 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141303.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:27:42,126 INFO [finetune.py:976] (0/7) Epoch 25, batch 3850, loss[loss=0.1613, simple_loss=0.2254, pruned_loss=0.04861, over 4819.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2424, pruned_loss=0.04876, over 954772.01 frames. 
], batch size: 33, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:28:02,208 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141329.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:28:02,834 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7333, 1.5135, 1.6513, 2.0201, 1.9810, 1.7922, 1.4688, 1.9144], + device='cuda:0'), covar=tensor([0.0633, 0.0943, 0.0618, 0.0409, 0.0544, 0.0505, 0.0621, 0.0415], + device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0201, 0.0185, 0.0171, 0.0177, 0.0176, 0.0149, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:28:18,584 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.3610, 3.4039, 2.4444, 3.9067, 3.3935, 3.3843, 1.3854, 3.3301], + device='cuda:0'), covar=tensor([0.2058, 0.1282, 0.3250, 0.2239, 0.2846, 0.1893, 0.5778, 0.2588], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0219, 0.0254, 0.0307, 0.0299, 0.0251, 0.0274, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 22:28:27,562 INFO [finetune.py:976] (0/7) Epoch 25, batch 3900, loss[loss=0.1408, simple_loss=0.224, pruned_loss=0.02883, over 4828.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2403, pruned_loss=0.04795, over 956062.38 frames. ], batch size: 38, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:28:31,460 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.108e+01 1.538e+02 1.758e+02 2.132e+02 4.697e+02, threshold=3.516e+02, percent-clipped=1.0 +2023-04-27 22:28:37,501 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141379.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:28:44,190 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141390.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:28:49,711 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-27 22:29:00,435 INFO [finetune.py:976] (0/7) Epoch 25, batch 3950, loss[loss=0.1612, simple_loss=0.228, pruned_loss=0.04722, over 4814.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.238, pruned_loss=0.04748, over 955133.38 frames. 
], batch size: 51, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:29:01,176 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6984, 1.5089, 1.6734, 2.1034, 2.0399, 1.7459, 1.4555, 1.9507], + device='cuda:0'), covar=tensor([0.0898, 0.1249, 0.0895, 0.0568, 0.0692, 0.0806, 0.0729, 0.0547], + device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0201, 0.0185, 0.0171, 0.0178, 0.0176, 0.0150, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:29:01,802 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141417.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 22:29:09,294 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=141427.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:29:17,840 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141441.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:29:26,761 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9285, 1.8666, 1.4403, 2.0731, 2.3395, 1.8315, 1.8103, 1.3649], + device='cuda:0'), covar=tensor([0.2366, 0.1567, 0.2009, 0.1637, 0.1063, 0.1895, 0.2206, 0.2618], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0306, 0.0349, 0.0284, 0.0326, 0.0303, 0.0297, 0.0370], + device='cuda:0'), out_proj_covar=tensor([6.3655e-05, 6.2923e-05, 7.3305e-05, 5.7039e-05, 6.6949e-05, 6.3428e-05, + 6.1504e-05, 7.8389e-05], device='cuda:0') +2023-04-27 22:29:33,820 INFO [finetune.py:976] (0/7) Epoch 25, batch 4000, loss[loss=0.1423, simple_loss=0.2076, pruned_loss=0.03851, over 4820.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2381, pruned_loss=0.04757, over 953207.59 frames. ], batch size: 25, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:29:33,913 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141465.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:29:36,894 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.301e+01 1.493e+02 1.709e+02 2.038e+02 4.561e+02, threshold=3.417e+02, percent-clipped=1.0 +2023-04-27 22:29:43,325 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141478.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 22:29:55,969 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141499.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:29:57,864 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141502.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:30:00,160 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6059, 2.0143, 1.7303, 1.9191, 1.4484, 1.6432, 1.6148, 1.3377], + device='cuda:0'), covar=tensor([0.1706, 0.1104, 0.0736, 0.1070, 0.3030, 0.1137, 0.1661, 0.2268], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0303, 0.0215, 0.0278, 0.0314, 0.0257, 0.0249, 0.0266], + device='cuda:0'), out_proj_covar=tensor([1.1450e-04, 1.1937e-04, 8.4523e-05, 1.0926e-04, 1.2638e-04, 1.0120e-04, + 1.0040e-04, 1.0509e-04], device='cuda:0') +2023-04-27 22:30:05,456 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=141513.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:30:06,624 INFO [finetune.py:976] (0/7) Epoch 25, batch 4050, loss[loss=0.162, simple_loss=0.2257, pruned_loss=0.04915, over 3993.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2413, pruned_loss=0.04827, over 953106.34 frames. 
], batch size: 17, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:30:21,183 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.26 vs. limit=5.0 +2023-04-27 22:30:39,499 INFO [finetune.py:976] (0/7) Epoch 25, batch 4100, loss[loss=0.1212, simple_loss=0.1963, pruned_loss=0.02307, over 4754.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2436, pruned_loss=0.04894, over 953015.74 frames. ], batch size: 26, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:30:42,488 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.442e+01 1.586e+02 1.949e+02 2.336e+02 4.544e+02, threshold=3.898e+02, percent-clipped=7.0 +2023-04-27 22:30:44,325 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141573.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:31:04,715 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141603.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:31:17,880 INFO [finetune.py:976] (0/7) Epoch 25, batch 4150, loss[loss=0.1322, simple_loss=0.2096, pruned_loss=0.02733, over 4802.00 frames. ], tot_loss[loss=0.1722, simple_loss=0.2452, pruned_loss=0.0496, over 953705.99 frames. ], batch size: 25, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:31:21,613 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=141621.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:32:02,964 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=141651.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:32:15,767 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0 +2023-04-27 22:32:23,211 INFO [finetune.py:976] (0/7) Epoch 25, batch 4200, loss[loss=0.1625, simple_loss=0.2275, pruned_loss=0.04869, over 4295.00 frames. ], tot_loss[loss=0.1714, simple_loss=0.245, pruned_loss=0.04894, over 954915.93 frames. ], batch size: 66, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:32:23,551 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-27 22:32:26,264 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.107e+02 1.638e+02 1.894e+02 2.201e+02 5.051e+02, threshold=3.789e+02, percent-clipped=1.0 +2023-04-27 22:32:47,244 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141685.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:33:09,358 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9555, 1.2265, 3.2957, 3.0444, 2.9579, 3.2103, 3.2134, 2.9047], + device='cuda:0'), covar=tensor([0.7071, 0.5497, 0.1441, 0.2124, 0.1504, 0.2068, 0.1326, 0.1806], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0306, 0.0406, 0.0408, 0.0348, 0.0410, 0.0317, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:33:28,598 INFO [finetune.py:976] (0/7) Epoch 25, batch 4250, loss[loss=0.1787, simple_loss=0.2435, pruned_loss=0.05697, over 4820.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2423, pruned_loss=0.04826, over 954389.22 frames. ], batch size: 41, lr: 3.00e-03, grad_scale: 16.0 +2023-04-27 22:33:31,010 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-27 22:34:30,331 INFO [finetune.py:976] (0/7) Epoch 25, batch 4300, loss[loss=0.1424, simple_loss=0.2047, pruned_loss=0.04006, over 4905.00 frames. 
], tot_loss[loss=0.1674, simple_loss=0.2393, pruned_loss=0.04777, over 953990.92 frames. ], batch size: 43, lr: 2.99e-03, grad_scale: 16.0 +2023-04-27 22:34:39,436 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.343e+01 1.507e+02 1.747e+02 2.193e+02 4.425e+02, threshold=3.494e+02, percent-clipped=2.0 +2023-04-27 22:34:41,379 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141773.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 22:35:15,317 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=141797.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:35:16,565 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141799.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:35:36,800 INFO [finetune.py:976] (0/7) Epoch 25, batch 4350, loss[loss=0.1876, simple_loss=0.2529, pruned_loss=0.06111, over 4910.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2359, pruned_loss=0.04648, over 954550.25 frames. ], batch size: 43, lr: 2.99e-03, grad_scale: 16.0 +2023-04-27 22:35:38,804 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0909, 2.6125, 2.1860, 2.4775, 1.7451, 2.0880, 2.1962, 1.7750], + device='cuda:0'), covar=tensor([0.1947, 0.1364, 0.0976, 0.1459, 0.3319, 0.1489, 0.1881, 0.2587], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0303, 0.0216, 0.0279, 0.0315, 0.0258, 0.0251, 0.0267], + device='cuda:0'), out_proj_covar=tensor([1.1480e-04, 1.1966e-04, 8.4887e-05, 1.0990e-04, 1.2705e-04, 1.0173e-04, + 1.0102e-04, 1.0560e-04], device='cuda:0') +2023-04-27 22:35:39,397 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9099, 2.4306, 2.0793, 2.2911, 1.5742, 1.9319, 2.0602, 1.6164], + device='cuda:0'), covar=tensor([0.2367, 0.1667, 0.1006, 0.1602, 0.3962, 0.1575, 0.2093, 0.2666], + device='cuda:0'), in_proj_covar=tensor([0.0287, 0.0303, 0.0216, 0.0279, 0.0315, 0.0258, 0.0251, 0.0267], + device='cuda:0'), out_proj_covar=tensor([1.1477e-04, 1.1963e-04, 8.4870e-05, 1.0987e-04, 1.2702e-04, 1.0171e-04, + 1.0100e-04, 1.0557e-04], device='cuda:0') +2023-04-27 22:36:20,221 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=141847.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:36:30,297 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.67 vs. limit=2.0 +2023-04-27 22:36:40,438 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5912, 1.3625, 4.4052, 4.0933, 3.8230, 4.2228, 4.1147, 3.8814], + device='cuda:0'), covar=tensor([0.7052, 0.6235, 0.1126, 0.1872, 0.1237, 0.1803, 0.1096, 0.1574], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0311, 0.0413, 0.0413, 0.0352, 0.0416, 0.0322, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:36:42,224 INFO [finetune.py:976] (0/7) Epoch 25, batch 4400, loss[loss=0.1772, simple_loss=0.2486, pruned_loss=0.05294, over 4801.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2379, pruned_loss=0.04751, over 955105.10 frames. 
], batch size: 51, lr: 2.99e-03, grad_scale: 16.0 +2023-04-27 22:36:43,532 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8372, 2.4503, 0.9498, 1.1775, 1.8554, 1.1566, 3.1629, 1.3620], + device='cuda:0'), covar=tensor([0.0893, 0.0954, 0.1075, 0.1764, 0.0673, 0.1397, 0.0355, 0.1066], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0050, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 22:36:50,565 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.176e+02 1.541e+02 1.847e+02 2.232e+02 3.824e+02, threshold=3.693e+02, percent-clipped=5.0 +2023-04-27 22:37:26,192 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5250, 1.3610, 4.3028, 3.6918, 3.7897, 4.0247, 3.8015, 3.5654], + device='cuda:0'), covar=tensor([0.9899, 0.8569, 0.1566, 0.3409, 0.2326, 0.3987, 0.2682, 0.3012], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0311, 0.0413, 0.0413, 0.0352, 0.0417, 0.0322, 0.0372], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:37:46,059 INFO [finetune.py:976] (0/7) Epoch 25, batch 4450, loss[loss=0.1629, simple_loss=0.2467, pruned_loss=0.03955, over 4753.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2407, pruned_loss=0.04813, over 954672.77 frames. ], batch size: 54, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:37:46,792 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141916.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:38:04,883 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=141928.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:38:19,654 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.33 vs. limit=2.0 +2023-04-27 22:38:50,109 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.31 vs. limit=2.0 +2023-04-27 22:38:51,130 INFO [finetune.py:976] (0/7) Epoch 25, batch 4500, loss[loss=0.2412, simple_loss=0.2968, pruned_loss=0.09284, over 4801.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2425, pruned_loss=0.04862, over 953121.55 frames. ], batch size: 40, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:38:59,402 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.003e+02 1.537e+02 1.854e+02 2.228e+02 3.851e+02, threshold=3.709e+02, percent-clipped=1.0 +2023-04-27 22:39:03,854 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141977.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:39:21,466 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=141985.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:39:23,958 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=141989.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:39:35,960 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-142000.pt +2023-04-27 22:39:57,183 INFO [finetune.py:976] (0/7) Epoch 25, batch 4550, loss[loss=0.1833, simple_loss=0.2556, pruned_loss=0.0555, over 4742.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2423, pruned_loss=0.0481, over 953133.85 frames. 
], batch size: 54, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:40:18,880 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=142033.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:40:38,456 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0 +2023-04-27 22:41:01,791 INFO [finetune.py:976] (0/7) Epoch 25, batch 4600, loss[loss=0.1802, simple_loss=0.2481, pruned_loss=0.05618, over 4846.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2421, pruned_loss=0.04768, over 953891.40 frames. ], batch size: 44, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:41:10,133 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.365e+01 1.636e+02 1.871e+02 2.331e+02 4.472e+02, threshold=3.743e+02, percent-clipped=1.0 +2023-04-27 22:41:12,051 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142073.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 22:41:13,223 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3132, 1.2509, 4.1372, 3.8312, 3.6204, 4.0250, 3.9283, 3.5898], + device='cuda:0'), covar=tensor([0.7756, 0.6152, 0.1122, 0.1879, 0.1208, 0.1493, 0.1358, 0.1842], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0309, 0.0409, 0.0411, 0.0350, 0.0415, 0.0320, 0.0369], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:41:23,274 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1212, 2.7078, 2.1586, 2.2654, 1.5578, 1.5637, 2.3130, 1.5064], + device='cuda:0'), covar=tensor([0.1561, 0.1325, 0.1302, 0.1487, 0.2115, 0.1749, 0.0829, 0.1949], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0209, 0.0168, 0.0203, 0.0199, 0.0185, 0.0156, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 22:41:42,591 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8086, 1.6144, 1.4232, 1.6675, 2.0058, 1.6992, 1.4728, 1.3330], + device='cuda:0'), covar=tensor([0.1478, 0.1247, 0.2051, 0.1177, 0.0853, 0.1481, 0.1804, 0.2440], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0308, 0.0352, 0.0287, 0.0329, 0.0305, 0.0299, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.4179e-05, 6.3358e-05, 7.3927e-05, 5.7444e-05, 6.7412e-05, 6.3730e-05, + 6.1901e-05, 7.9210e-05], device='cuda:0') +2023-04-27 22:41:43,176 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142097.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:42:06,627 INFO [finetune.py:976] (0/7) Epoch 25, batch 4650, loss[loss=0.1885, simple_loss=0.2528, pruned_loss=0.06209, over 4917.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2399, pruned_loss=0.04727, over 954438.55 frames. 
], batch size: 38, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:42:15,788 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=142121.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 22:42:26,850 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8272, 1.3386, 1.9162, 2.3909, 1.9382, 1.7947, 1.8499, 1.7369], + device='cuda:0'), covar=tensor([0.4302, 0.6655, 0.6030, 0.5135, 0.5194, 0.7196, 0.7462, 0.9588], + device='cuda:0'), in_proj_covar=tensor([0.0438, 0.0419, 0.0513, 0.0507, 0.0466, 0.0500, 0.0503, 0.0516], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:42:28,024 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0331, 4.5237, 0.8350, 2.4673, 2.7734, 3.2155, 2.6314, 1.1209], + device='cuda:0'), covar=tensor([0.1330, 0.0832, 0.2279, 0.1231, 0.0909, 0.0980, 0.1463, 0.2048], + device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0242, 0.0138, 0.0122, 0.0133, 0.0154, 0.0119, 0.0121], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 22:42:47,434 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=142145.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:43:11,598 INFO [finetune.py:976] (0/7) Epoch 25, batch 4700, loss[loss=0.1188, simple_loss=0.1867, pruned_loss=0.02547, over 4791.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2372, pruned_loss=0.04674, over 955808.95 frames. ], batch size: 29, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:43:19,815 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.009e+02 1.574e+02 1.870e+02 2.251e+02 4.397e+02, threshold=3.741e+02, percent-clipped=1.0 +2023-04-27 22:43:30,491 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4950, 1.3977, 1.6160, 1.7879, 1.4160, 1.1509, 1.3346, 0.7979], + device='cuda:0'), covar=tensor([0.0459, 0.0519, 0.0397, 0.0424, 0.0605, 0.1363, 0.0535, 0.0614], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0067, 0.0069, 0.0075, 0.0095, 0.0073, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 22:43:32,246 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9279, 1.2009, 3.2971, 3.0833, 2.9735, 3.2281, 3.1779, 2.9089], + device='cuda:0'), covar=tensor([0.7338, 0.5480, 0.1394, 0.1965, 0.1403, 0.2027, 0.1691, 0.1734], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0307, 0.0407, 0.0409, 0.0348, 0.0413, 0.0318, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:43:53,116 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2161, 1.6225, 2.0027, 2.7783, 2.7789, 2.0063, 1.8885, 2.3226], + device='cuda:0'), covar=tensor([0.1176, 0.1781, 0.1179, 0.0773, 0.0678, 0.1334, 0.1041, 0.0844], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0201, 0.0185, 0.0171, 0.0177, 0.0177, 0.0150, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:44:17,215 INFO [finetune.py:976] (0/7) Epoch 25, batch 4750, loss[loss=0.1431, simple_loss=0.22, pruned_loss=0.0331, over 4888.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2358, pruned_loss=0.0463, over 955332.59 frames. 
], batch size: 32, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:45:29,870 INFO [finetune.py:976] (0/7) Epoch 25, batch 4800, loss[loss=0.219, simple_loss=0.3015, pruned_loss=0.06829, over 4816.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2398, pruned_loss=0.04783, over 954209.29 frames. ], batch size: 40, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:45:34,012 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.041e+02 1.626e+02 1.868e+02 2.200e+02 5.189e+02, threshold=3.736e+02, percent-clipped=2.0 +2023-04-27 22:45:40,841 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142272.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:45:43,729 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5688, 1.5947, 1.9209, 1.9589, 1.5422, 1.2907, 1.6399, 1.0722], + device='cuda:0'), covar=tensor([0.0594, 0.0543, 0.0467, 0.0633, 0.0574, 0.0884, 0.0653, 0.0671], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0067, 0.0069, 0.0075, 0.0095, 0.0073, 0.0064], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 22:45:54,142 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142284.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:46:35,528 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0880, 1.9440, 1.6981, 1.7169, 2.2108, 1.7578, 2.4647, 1.4940], + device='cuda:0'), covar=tensor([0.3176, 0.1586, 0.4292, 0.2193, 0.1247, 0.1939, 0.1380, 0.4429], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0353, 0.0427, 0.0352, 0.0383, 0.0375, 0.0370, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:46:36,617 INFO [finetune.py:976] (0/7) Epoch 25, batch 4850, loss[loss=0.1631, simple_loss=0.2431, pruned_loss=0.04153, over 4840.00 frames. ], tot_loss[loss=0.1701, simple_loss=0.2428, pruned_loss=0.04864, over 953177.94 frames. ], batch size: 47, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:47:30,104 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5118, 3.5493, 0.9957, 2.0049, 1.9120, 2.6029, 1.9510, 0.9236], + device='cuda:0'), covar=tensor([0.1343, 0.0975, 0.1903, 0.1212, 0.1079, 0.0929, 0.1526, 0.2081], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0240, 0.0136, 0.0121, 0.0132, 0.0153, 0.0118, 0.0120], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 22:47:42,703 INFO [finetune.py:976] (0/7) Epoch 25, batch 4900, loss[loss=0.1959, simple_loss=0.275, pruned_loss=0.05842, over 4895.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.243, pruned_loss=0.04874, over 950939.24 frames. 
], batch size: 36, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:47:51,898 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.944e+01 1.647e+02 1.933e+02 2.315e+02 7.407e+02, threshold=3.866e+02, percent-clipped=3.0 +2023-04-27 22:47:56,010 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0904, 0.7189, 0.9248, 0.7423, 1.2061, 0.9919, 0.8429, 0.9699], + device='cuda:0'), covar=tensor([0.1984, 0.1734, 0.2139, 0.1683, 0.1212, 0.1490, 0.1697, 0.2594], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0308, 0.0352, 0.0286, 0.0328, 0.0305, 0.0299, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.4206e-05, 6.3317e-05, 7.4080e-05, 5.7307e-05, 6.7155e-05, 6.3749e-05, + 6.1951e-05, 7.9251e-05], device='cuda:0') +2023-04-27 22:48:49,018 INFO [finetune.py:976] (0/7) Epoch 25, batch 4950, loss[loss=0.1757, simple_loss=0.247, pruned_loss=0.05217, over 4805.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2439, pruned_loss=0.04839, over 953051.32 frames. ], batch size: 40, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:49:48,437 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8789, 2.5972, 1.9094, 1.9131, 1.3940, 1.4356, 1.9718, 1.3559], + device='cuda:0'), covar=tensor([0.1599, 0.1223, 0.1349, 0.1559, 0.2249, 0.1880, 0.0979, 0.1983], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0209, 0.0168, 0.0203, 0.0199, 0.0185, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 22:49:48,523 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-27 22:49:58,430 INFO [finetune.py:976] (0/7) Epoch 25, batch 5000, loss[loss=0.1262, simple_loss=0.2014, pruned_loss=0.02549, over 4794.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2423, pruned_loss=0.048, over 952102.00 frames. ], batch size: 29, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:50:01,481 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.151e+02 1.520e+02 1.784e+02 2.172e+02 4.625e+02, threshold=3.567e+02, percent-clipped=1.0 +2023-04-27 22:50:44,765 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.5965, 4.5042, 3.3590, 5.3407, 4.6036, 4.6319, 1.8417, 4.5343], + device='cuda:0'), covar=tensor([0.1887, 0.0900, 0.2864, 0.1000, 0.2541, 0.1868, 0.6286, 0.2118], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0218, 0.0251, 0.0305, 0.0300, 0.0249, 0.0274, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 22:51:03,145 INFO [finetune.py:976] (0/7) Epoch 25, batch 5050, loss[loss=0.1476, simple_loss=0.2229, pruned_loss=0.03614, over 4850.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2397, pruned_loss=0.04754, over 950303.06 frames. ], batch size: 49, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:51:48,460 INFO [finetune.py:976] (0/7) Epoch 25, batch 5100, loss[loss=0.2055, simple_loss=0.2697, pruned_loss=0.07067, over 4904.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.236, pruned_loss=0.04608, over 950330.24 frames. 
], batch size: 36, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:51:51,977 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.162e+02 1.504e+02 1.862e+02 2.404e+02 6.312e+02, threshold=3.723e+02, percent-clipped=2.0 +2023-04-27 22:51:53,290 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142572.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:51:53,925 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142573.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:52:01,147 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=142584.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:52:21,633 INFO [finetune.py:976] (0/7) Epoch 25, batch 5150, loss[loss=0.1401, simple_loss=0.2048, pruned_loss=0.03766, over 4795.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2364, pruned_loss=0.0462, over 952119.22 frames. ], batch size: 26, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:52:26,229 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=142620.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:52:33,587 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=142632.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:52:35,329 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142634.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:52:35,543 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-27 22:52:43,036 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3485, 1.4594, 1.3517, 1.6870, 1.5556, 1.8059, 1.3500, 3.3536], + device='cuda:0'), covar=tensor([0.0554, 0.0798, 0.0757, 0.1189, 0.0634, 0.0621, 0.0753, 0.0151], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-27 22:52:56,113 INFO [finetune.py:976] (0/7) Epoch 25, batch 5200, loss[loss=0.1768, simple_loss=0.2677, pruned_loss=0.04293, over 4809.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2391, pruned_loss=0.04709, over 950745.57 frames. ], batch size: 45, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:53:00,178 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.581e+02 1.903e+02 2.348e+02 3.515e+02, threshold=3.805e+02, percent-clipped=0.0 +2023-04-27 22:53:29,660 INFO [finetune.py:976] (0/7) Epoch 25, batch 5250, loss[loss=0.2227, simple_loss=0.2874, pruned_loss=0.079, over 4902.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2413, pruned_loss=0.04731, over 951355.60 frames. ], batch size: 37, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:53:33,366 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.94 vs. 
limit=5.0 +2023-04-27 22:53:36,253 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=142724.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:53:50,410 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0416, 1.8341, 2.0227, 2.4009, 2.3758, 2.0100, 1.6559, 2.2709], + device='cuda:0'), covar=tensor([0.0755, 0.1050, 0.0670, 0.0458, 0.0554, 0.0686, 0.0685, 0.0465], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0202, 0.0185, 0.0171, 0.0177, 0.0177, 0.0150, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 22:54:19,566 INFO [finetune.py:976] (0/7) Epoch 25, batch 5300, loss[loss=0.1863, simple_loss=0.2584, pruned_loss=0.05707, over 4776.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2427, pruned_loss=0.04804, over 952019.24 frames. ], batch size: 29, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:54:22,611 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.008e+02 1.572e+02 1.803e+02 2.243e+02 5.104e+02, threshold=3.606e+02, percent-clipped=2.0 +2023-04-27 22:54:44,815 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=142785.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:55:27,672 INFO [finetune.py:976] (0/7) Epoch 25, batch 5350, loss[loss=0.1478, simple_loss=0.2082, pruned_loss=0.04375, over 3977.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.243, pruned_loss=0.04746, over 953457.90 frames. ], batch size: 17, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:56:34,014 INFO [finetune.py:976] (0/7) Epoch 25, batch 5400, loss[loss=0.1712, simple_loss=0.2445, pruned_loss=0.04894, over 4836.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.24, pruned_loss=0.04655, over 954072.21 frames. ], batch size: 39, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:56:42,412 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.018e+02 1.482e+02 1.784e+02 2.095e+02 4.679e+02, threshold=3.568e+02, percent-clipped=3.0 +2023-04-27 22:57:18,641 INFO [finetune.py:976] (0/7) Epoch 25, batch 5450, loss[loss=0.1219, simple_loss=0.1928, pruned_loss=0.02553, over 4731.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2381, pruned_loss=0.04595, over 956179.05 frames. ], batch size: 23, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:57:27,229 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=142929.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:57:48,723 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0 +2023-04-27 22:57:49,175 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1259, 2.7527, 2.4301, 2.6237, 2.0637, 2.3701, 2.4968, 1.8619], + device='cuda:0'), covar=tensor([0.2070, 0.1156, 0.0689, 0.1141, 0.2926, 0.1078, 0.1814, 0.2424], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0301, 0.0213, 0.0276, 0.0312, 0.0256, 0.0248, 0.0265], + device='cuda:0'), out_proj_covar=tensor([1.1329e-04, 1.1864e-04, 8.3892e-05, 1.0858e-04, 1.2578e-04, 1.0071e-04, + 1.0003e-04, 1.0478e-04], device='cuda:0') +2023-04-27 22:57:52,209 INFO [finetune.py:976] (0/7) Epoch 25, batch 5500, loss[loss=0.1624, simple_loss=0.234, pruned_loss=0.04543, over 4812.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2356, pruned_loss=0.04556, over 957431.19 frames. 
], batch size: 45, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:57:55,643 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.918e+01 1.380e+02 1.724e+02 2.155e+02 4.005e+02, threshold=3.448e+02, percent-clipped=1.0 +2023-04-27 22:58:11,885 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6725, 1.4409, 1.2727, 1.5474, 1.8647, 1.5495, 1.3881, 1.2544], + device='cuda:0'), covar=tensor([0.1514, 0.1511, 0.1841, 0.1193, 0.0962, 0.1603, 0.1914, 0.2218], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0305, 0.0349, 0.0283, 0.0325, 0.0302, 0.0296, 0.0370], + device='cuda:0'), out_proj_covar=tensor([6.3583e-05, 6.2625e-05, 7.3247e-05, 5.6668e-05, 6.6636e-05, 6.3192e-05, + 6.1256e-05, 7.8537e-05], device='cuda:0') +2023-04-27 22:58:26,144 INFO [finetune.py:976] (0/7) Epoch 25, batch 5550, loss[loss=0.1818, simple_loss=0.255, pruned_loss=0.0543, over 4926.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2375, pruned_loss=0.04633, over 957958.45 frames. ], batch size: 38, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:58:57,671 INFO [finetune.py:976] (0/7) Epoch 25, batch 5600, loss[loss=0.2126, simple_loss=0.2703, pruned_loss=0.07743, over 4760.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2411, pruned_loss=0.04664, over 956426.05 frames. ], batch size: 54, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:58:58,383 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.21 vs. limit=5.0 +2023-04-27 22:59:00,542 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.590e+02 1.837e+02 2.169e+02 3.781e+02, threshold=3.675e+02, percent-clipped=1.0 +2023-04-27 22:59:06,451 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143080.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 22:59:24,968 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3890, 1.6047, 1.4313, 1.5695, 1.3725, 1.3422, 1.3416, 1.1447], + device='cuda:0'), covar=tensor([0.1474, 0.1349, 0.0911, 0.1207, 0.3287, 0.1145, 0.1753, 0.2054], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0302, 0.0214, 0.0277, 0.0314, 0.0256, 0.0249, 0.0267], + device='cuda:0'), out_proj_covar=tensor([1.1386e-04, 1.1909e-04, 8.4265e-05, 1.0900e-04, 1.2638e-04, 1.0098e-04, + 1.0048e-04, 1.0535e-04], device='cuda:0') +2023-04-27 22:59:27,655 INFO [finetune.py:976] (0/7) Epoch 25, batch 5650, loss[loss=0.1703, simple_loss=0.2537, pruned_loss=0.04346, over 4797.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2441, pruned_loss=0.04733, over 957993.78 frames. 
], batch size: 29, lr: 2.99e-03, grad_scale: 32.0 +2023-04-27 22:59:29,522 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0450, 2.4983, 1.0706, 1.4963, 1.9391, 1.2510, 3.3229, 1.8565], + device='cuda:0'), covar=tensor([0.0714, 0.0685, 0.0749, 0.1213, 0.0484, 0.1014, 0.0281, 0.0526], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 22:59:51,071 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5450, 2.3454, 2.6496, 3.1352, 2.3860, 2.1531, 2.4963, 1.8374], + device='cuda:0'), covar=tensor([0.0391, 0.0596, 0.0356, 0.0479, 0.0558, 0.0850, 0.0495, 0.0552], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0068, 0.0073, 0.0094, 0.0072, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 23:00:24,804 INFO [finetune.py:976] (0/7) Epoch 25, batch 5700, loss[loss=0.1387, simple_loss=0.1973, pruned_loss=0.04009, over 3998.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2388, pruned_loss=0.04593, over 938756.95 frames. ], batch size: 17, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:00:27,757 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.938e+01 1.487e+02 1.759e+02 2.216e+02 4.830e+02, threshold=3.518e+02, percent-clipped=2.0 +2023-04-27 23:00:46,551 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8878, 2.3689, 2.3439, 2.3616, 2.2197, 2.3776, 2.4028, 2.3862], + device='cuda:0'), covar=tensor([0.3416, 0.5034, 0.4270, 0.4799, 0.5505, 0.6590, 0.5015, 0.4496], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0375, 0.0328, 0.0341, 0.0349, 0.0395, 0.0360, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 23:00:51,906 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-25.pt +2023-04-27 23:01:04,602 INFO [finetune.py:976] (0/7) Epoch 26, batch 0, loss[loss=0.188, simple_loss=0.262, pruned_loss=0.05701, over 4810.00 frames. ], tot_loss[loss=0.188, simple_loss=0.262, pruned_loss=0.05701, over 4810.00 frames. 
], batch size: 38, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:01:04,603 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-27 23:01:07,355 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8737, 2.1679, 1.9283, 2.1222, 1.7264, 1.8360, 1.7674, 1.4624], + device='cuda:0'), covar=tensor([0.1665, 0.1395, 0.0834, 0.1148, 0.3278, 0.1065, 0.1656, 0.2368], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0302, 0.0213, 0.0276, 0.0313, 0.0256, 0.0248, 0.0266], + device='cuda:0'), out_proj_covar=tensor([1.1349e-04, 1.1920e-04, 8.3963e-05, 1.0865e-04, 1.2629e-04, 1.0072e-04, + 1.0005e-04, 1.0505e-04], device='cuda:0') +2023-04-27 23:01:09,420 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4120, 1.2886, 1.6631, 1.6394, 1.2697, 1.2446, 1.3986, 0.8348], + device='cuda:0'), covar=tensor([0.0568, 0.0681, 0.0420, 0.0562, 0.0776, 0.1181, 0.0499, 0.0614], + device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0067, 0.0066, 0.0068, 0.0073, 0.0094, 0.0072, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 23:01:12,412 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2381, 1.6270, 1.7366, 1.8683, 1.8383, 1.9536, 1.7625, 1.7841], + device='cuda:0'), covar=tensor([0.3665, 0.4961, 0.4118, 0.3942, 0.5358, 0.6052, 0.5000, 0.4816], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0375, 0.0328, 0.0341, 0.0349, 0.0395, 0.0360, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 23:01:26,500 INFO [finetune.py:1010] (0/7) Epoch 26, validation: loss=0.1543, simple_loss=0.2237, pruned_loss=0.04251, over 2265189.00 frames. +2023-04-27 23:01:26,501 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-27 23:01:34,837 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-27 23:02:16,890 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143229.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:02:34,932 INFO [finetune.py:976] (0/7) Epoch 26, batch 50, loss[loss=0.1651, simple_loss=0.2429, pruned_loss=0.04366, over 4869.00 frames. ], tot_loss[loss=0.175, simple_loss=0.2492, pruned_loss=0.05047, over 216064.08 frames. 
], batch size: 34, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:02:38,588 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7217, 1.3388, 1.4253, 1.4950, 1.8244, 1.5533, 1.3024, 1.4077], + device='cuda:0'), covar=tensor([0.1370, 0.1326, 0.1486, 0.1203, 0.0827, 0.1195, 0.1750, 0.1846], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0305, 0.0348, 0.0283, 0.0324, 0.0302, 0.0295, 0.0370], + device='cuda:0'), out_proj_covar=tensor([6.3532e-05, 6.2615e-05, 7.3014e-05, 5.6685e-05, 6.6534e-05, 6.3119e-05, + 6.1101e-05, 7.8460e-05], device='cuda:0') +2023-04-27 23:03:09,819 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.418e+01 1.474e+02 1.785e+02 2.280e+02 3.483e+02, threshold=3.571e+02, percent-clipped=0.0 +2023-04-27 23:03:19,707 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=143277.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:03:31,436 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4818, 2.0254, 2.3822, 2.9905, 2.3912, 1.9535, 1.9673, 2.2596], + device='cuda:0'), covar=tensor([0.3031, 0.2897, 0.1547, 0.2307, 0.2623, 0.2516, 0.3550, 0.2030], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0247, 0.0229, 0.0315, 0.0221, 0.0235, 0.0228, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 23:03:40,350 INFO [finetune.py:976] (0/7) Epoch 26, batch 100, loss[loss=0.2014, simple_loss=0.2638, pruned_loss=0.06948, over 4843.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2406, pruned_loss=0.04765, over 381751.31 frames. ], batch size: 44, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:03:40,425 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.5860, 3.5006, 2.7284, 4.1434, 3.6202, 3.6032, 1.5964, 3.5242], + device='cuda:0'), covar=tensor([0.1868, 0.1432, 0.3552, 0.2003, 0.2891, 0.1946, 0.6055, 0.2685], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0217, 0.0251, 0.0304, 0.0297, 0.0247, 0.0272, 0.0272], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 23:04:19,086 INFO [finetune.py:976] (0/7) Epoch 26, batch 150, loss[loss=0.1579, simple_loss=0.2283, pruned_loss=0.04374, over 4820.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2366, pruned_loss=0.0469, over 509982.82 frames. ], batch size: 30, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:04:25,892 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. limit=5.0 +2023-04-27 23:04:37,628 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.031e+02 1.476e+02 1.692e+02 2.051e+02 5.029e+02, threshold=3.384e+02, percent-clipped=1.0 +2023-04-27 23:04:43,892 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=143380.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:04:52,922 INFO [finetune.py:976] (0/7) Epoch 26, batch 200, loss[loss=0.1776, simple_loss=0.2335, pruned_loss=0.0608, over 4116.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2345, pruned_loss=0.0461, over 608426.51 frames. 
], batch size: 65, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:04:59,060 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143400.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 23:04:59,705 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8694, 2.5034, 1.9630, 2.0695, 1.5708, 1.4852, 2.0475, 1.5142], + device='cuda:0'), covar=tensor([0.1366, 0.1130, 0.1131, 0.1357, 0.1910, 0.1585, 0.0809, 0.1775], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0210, 0.0168, 0.0204, 0.0200, 0.0185, 0.0156, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 23:05:16,578 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=143428.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:05:31,681 INFO [finetune.py:976] (0/7) Epoch 26, batch 250, loss[loss=0.1901, simple_loss=0.2704, pruned_loss=0.05485, over 4828.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2379, pruned_loss=0.04733, over 683859.30 frames. ], batch size: 40, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:05:44,904 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143461.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 23:05:50,285 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.157e+02 1.556e+02 1.824e+02 2.331e+02 6.380e+02, threshold=3.648e+02, percent-clipped=4.0 +2023-04-27 23:05:58,115 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=143473.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 23:06:15,187 INFO [finetune.py:976] (0/7) Epoch 26, batch 300, loss[loss=0.1759, simple_loss=0.2493, pruned_loss=0.0512, over 4921.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.242, pruned_loss=0.0482, over 744272.14 frames. ], batch size: 42, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:06:43,057 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=143534.0, num_to_drop=1, layers_to_drop={3} +2023-04-27 23:06:43,099 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-27 23:06:48,238 INFO [finetune.py:976] (0/7) Epoch 26, batch 350, loss[loss=0.1629, simple_loss=0.2458, pruned_loss=0.04003, over 4841.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2435, pruned_loss=0.04885, over 790807.52 frames. ], batch size: 30, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:07:08,173 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.499e+02 1.727e+02 2.023e+02 3.986e+02, threshold=3.454e+02, percent-clipped=1.0 +2023-04-27 23:07:22,102 INFO [finetune.py:976] (0/7) Epoch 26, batch 400, loss[loss=0.1986, simple_loss=0.2675, pruned_loss=0.06487, over 4806.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2446, pruned_loss=0.04869, over 827106.07 frames. ], batch size: 41, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:07:55,515 INFO [finetune.py:976] (0/7) Epoch 26, batch 450, loss[loss=0.2211, simple_loss=0.2965, pruned_loss=0.0729, over 4882.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.243, pruned_loss=0.04832, over 854754.35 frames. ], batch size: 35, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:08:20,551 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.962e+01 1.532e+02 1.796e+02 2.161e+02 5.029e+02, threshold=3.592e+02, percent-clipped=5.0 +2023-04-27 23:08:32,399 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.27 vs. 
limit=5.0 +2023-04-27 23:08:43,966 INFO [finetune.py:976] (0/7) Epoch 26, batch 500, loss[loss=0.1493, simple_loss=0.2224, pruned_loss=0.03809, over 4910.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2399, pruned_loss=0.04699, over 876765.33 frames. ], batch size: 29, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:09:27,560 INFO [finetune.py:976] (0/7) Epoch 26, batch 550, loss[loss=0.1454, simple_loss=0.2191, pruned_loss=0.03582, over 4753.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2372, pruned_loss=0.04648, over 893823.45 frames. ], batch size: 27, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:09:37,685 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143756.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 23:09:47,112 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.995e+01 1.539e+02 1.829e+02 2.212e+02 3.034e+02, threshold=3.659e+02, percent-clipped=1.0 +2023-04-27 23:10:00,517 INFO [finetune.py:976] (0/7) Epoch 26, batch 600, loss[loss=0.2335, simple_loss=0.2983, pruned_loss=0.08435, over 4250.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2374, pruned_loss=0.04693, over 903260.15 frames. ], batch size: 65, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:10:03,074 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9067, 2.3328, 1.8246, 1.8287, 1.3442, 1.3993, 1.9349, 1.2739], + device='cuda:0'), covar=tensor([0.1605, 0.1421, 0.1429, 0.1709, 0.2308, 0.1854, 0.1010, 0.2069], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0210, 0.0169, 0.0205, 0.0201, 0.0185, 0.0157, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 23:10:12,707 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7991, 3.7667, 2.8743, 4.4003, 3.8452, 3.7613, 1.6094, 3.8320], + device='cuda:0'), covar=tensor([0.1695, 0.1148, 0.3496, 0.1562, 0.3088, 0.1728, 0.5740, 0.2321], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0218, 0.0251, 0.0305, 0.0297, 0.0246, 0.0273, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 23:10:25,721 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=143829.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 23:10:33,615 INFO [finetune.py:976] (0/7) Epoch 26, batch 650, loss[loss=0.1681, simple_loss=0.2477, pruned_loss=0.04425, over 4832.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2412, pruned_loss=0.04814, over 915013.73 frames. 
], batch size: 49, lr: 2.98e-03, grad_scale: 32.0 +2023-04-27 23:11:05,254 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4475, 1.7919, 1.9376, 1.9903, 1.8166, 1.8167, 1.9537, 1.9243], + device='cuda:0'), covar=tensor([0.3815, 0.5466, 0.4078, 0.4068, 0.5610, 0.7617, 0.5074, 0.4625], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0375, 0.0327, 0.0340, 0.0350, 0.0395, 0.0360, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 23:11:14,499 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.206e+02 1.587e+02 1.916e+02 2.318e+02 7.608e+02, threshold=3.833e+02, percent-clipped=3.0 +2023-04-27 23:11:26,757 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4642, 1.7891, 1.9141, 1.9791, 1.7954, 1.8290, 1.9234, 1.9141], + device='cuda:0'), covar=tensor([0.4451, 0.5589, 0.4382, 0.4462, 0.5981, 0.7531, 0.5367, 0.4860], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0374, 0.0327, 0.0339, 0.0349, 0.0394, 0.0360, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 23:11:40,022 INFO [finetune.py:976] (0/7) Epoch 26, batch 700, loss[loss=0.133, simple_loss=0.1982, pruned_loss=0.03394, over 4724.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2413, pruned_loss=0.04784, over 923592.83 frames. ], batch size: 23, lr: 2.98e-03, grad_scale: 64.0 +2023-04-27 23:12:45,261 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6289, 3.8774, 0.7875, 2.1372, 2.0761, 2.6495, 2.2669, 1.0204], + device='cuda:0'), covar=tensor([0.1416, 0.0795, 0.2222, 0.1200, 0.1000, 0.1088, 0.1405, 0.2326], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0239, 0.0136, 0.0121, 0.0132, 0.0153, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 23:12:45,798 INFO [finetune.py:976] (0/7) Epoch 26, batch 750, loss[loss=0.1404, simple_loss=0.2135, pruned_loss=0.03361, over 4777.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2416, pruned_loss=0.04774, over 928841.27 frames. ], batch size: 51, lr: 2.98e-03, grad_scale: 64.0 +2023-04-27 23:13:26,583 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.623e+02 1.843e+02 2.196e+02 3.710e+02, threshold=3.686e+02, percent-clipped=0.0 +2023-04-27 23:13:35,934 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2550, 1.6156, 2.0814, 2.3266, 2.0929, 1.6279, 1.2749, 1.8652], + device='cuda:0'), covar=tensor([0.2947, 0.3056, 0.1531, 0.2057, 0.2421, 0.2513, 0.4106, 0.1875], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0245, 0.0227, 0.0312, 0.0219, 0.0234, 0.0226, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 23:13:56,906 INFO [finetune.py:976] (0/7) Epoch 26, batch 800, loss[loss=0.1665, simple_loss=0.2415, pruned_loss=0.04578, over 4860.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2406, pruned_loss=0.04685, over 936143.62 frames. ], batch size: 44, lr: 2.98e-03, grad_scale: 64.0 +2023-04-27 23:14:07,392 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-144000.pt +2023-04-27 23:14:11,338 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.76 vs. 
limit=5.0 +2023-04-27 23:14:18,384 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-27 23:14:52,464 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9816, 1.3773, 1.6020, 2.2996, 2.3251, 1.8379, 1.5502, 2.0236], + device='cuda:0'), covar=tensor([0.0799, 0.1639, 0.1131, 0.0550, 0.0594, 0.0914, 0.0810, 0.0613], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0202, 0.0185, 0.0171, 0.0177, 0.0177, 0.0150, 0.0178], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 23:15:01,724 INFO [finetune.py:976] (0/7) Epoch 26, batch 850, loss[loss=0.1781, simple_loss=0.2499, pruned_loss=0.05316, over 4727.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.239, pruned_loss=0.04634, over 941679.69 frames. ], batch size: 59, lr: 2.98e-03, grad_scale: 64.0 +2023-04-27 23:15:15,457 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=144056.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 23:15:35,340 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.501e+02 1.661e+02 2.180e+02 3.367e+02, threshold=3.322e+02, percent-clipped=0.0 +2023-04-27 23:16:05,578 INFO [finetune.py:976] (0/7) Epoch 26, batch 900, loss[loss=0.1457, simple_loss=0.2179, pruned_loss=0.03673, over 4933.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2369, pruned_loss=0.04629, over 946096.50 frames. ], batch size: 33, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:16:08,968 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-27 23:16:12,927 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=144104.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 23:16:17,978 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0 +2023-04-27 23:16:22,989 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0319, 1.0659, 1.1613, 1.1663, 1.0067, 0.9347, 0.9810, 0.6713], + device='cuda:0'), covar=tensor([0.0480, 0.0537, 0.0411, 0.0541, 0.0689, 0.1072, 0.0414, 0.0544], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0068, 0.0073, 0.0094, 0.0072, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 23:16:34,441 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=144129.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 23:16:41,249 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0822, 2.4812, 2.1544, 2.4064, 1.7239, 2.1380, 2.2118, 1.7709], + device='cuda:0'), covar=tensor([0.1770, 0.1060, 0.0711, 0.1128, 0.3294, 0.1007, 0.1859, 0.2311], + device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0301, 0.0213, 0.0276, 0.0313, 0.0255, 0.0249, 0.0265], + device='cuda:0'), out_proj_covar=tensor([1.1395e-04, 1.1845e-04, 8.3950e-05, 1.0873e-04, 1.2625e-04, 1.0030e-04, + 1.0024e-04, 1.0456e-04], device='cuda:0') +2023-04-27 23:16:53,904 INFO [finetune.py:976] (0/7) Epoch 26, batch 950, loss[loss=0.1441, simple_loss=0.2195, pruned_loss=0.03433, over 4712.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2361, pruned_loss=0.04623, over 948325.26 frames. 
], batch size: 23, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:17:28,112 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.443e+02 1.800e+02 2.192e+02 5.909e+02, threshold=3.600e+02, percent-clipped=3.0 +2023-04-27 23:17:37,856 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=144177.0, num_to_drop=1, layers_to_drop={0} +2023-04-27 23:17:59,697 INFO [finetune.py:976] (0/7) Epoch 26, batch 1000, loss[loss=0.2203, simple_loss=0.2992, pruned_loss=0.07066, over 4758.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2396, pruned_loss=0.0472, over 950231.41 frames. ], batch size: 54, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:18:09,861 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-27 23:18:32,530 INFO [finetune.py:976] (0/7) Epoch 26, batch 1050, loss[loss=0.2076, simple_loss=0.2794, pruned_loss=0.06789, over 4813.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2416, pruned_loss=0.04766, over 949878.86 frames. ], batch size: 45, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:18:45,252 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8393, 2.4629, 1.9472, 1.8631, 1.3260, 1.3761, 2.0488, 1.3245], + device='cuda:0'), covar=tensor([0.1670, 0.1329, 0.1458, 0.1761, 0.2481, 0.2079, 0.1019, 0.2187], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0210, 0.0168, 0.0204, 0.0200, 0.0185, 0.0156, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 23:18:51,251 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.174e+02 1.491e+02 1.893e+02 2.144e+02 7.486e+02, threshold=3.787e+02, percent-clipped=1.0 +2023-04-27 23:19:06,557 INFO [finetune.py:976] (0/7) Epoch 26, batch 1100, loss[loss=0.2191, simple_loss=0.2766, pruned_loss=0.08077, over 4889.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2431, pruned_loss=0.04833, over 950717.20 frames. ], batch size: 35, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:19:39,802 INFO [finetune.py:976] (0/7) Epoch 26, batch 1150, loss[loss=0.1642, simple_loss=0.2454, pruned_loss=0.04147, over 4916.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2439, pruned_loss=0.04802, over 952369.01 frames. 
], batch size: 33, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:19:46,922 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4314, 2.9373, 0.9853, 1.6927, 2.5484, 1.4483, 4.2331, 1.9530], + device='cuda:0'), covar=tensor([0.0647, 0.0771, 0.0843, 0.1194, 0.0462, 0.1067, 0.0237, 0.0616], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 23:19:58,627 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8357, 3.4360, 0.8332, 2.0000, 1.9884, 2.4264, 1.9408, 0.9809], + device='cuda:0'), covar=tensor([0.1159, 0.0892, 0.2167, 0.1126, 0.1011, 0.1031, 0.1455, 0.2105], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0237, 0.0135, 0.0120, 0.0131, 0.0151, 0.0116, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 23:19:58,649 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6258, 1.5730, 0.6922, 1.3354, 1.6265, 1.4666, 1.4145, 1.4897], + device='cuda:0'), covar=tensor([0.0485, 0.0382, 0.0348, 0.0560, 0.0278, 0.0529, 0.0488, 0.0587], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0038, 0.0053, 0.0039, 0.0050, 0.0050, 0.0052], + device='cuda:0') +2023-04-27 23:19:59,727 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.045e+02 1.546e+02 1.832e+02 2.212e+02 3.348e+02, threshold=3.664e+02, percent-clipped=0.0 +2023-04-27 23:20:13,103 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9405, 3.7971, 3.0236, 4.5316, 3.7245, 3.8938, 2.0068, 3.8997], + device='cuda:0'), covar=tensor([0.1617, 0.1054, 0.4591, 0.0976, 0.2641, 0.1608, 0.4591, 0.2044], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0219, 0.0253, 0.0307, 0.0300, 0.0248, 0.0274, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 23:20:14,235 INFO [finetune.py:976] (0/7) Epoch 26, batch 1200, loss[loss=0.1693, simple_loss=0.2444, pruned_loss=0.04713, over 4815.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2419, pruned_loss=0.04716, over 952871.28 frames. ], batch size: 40, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:20:32,337 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144411.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:21:13,638 INFO [finetune.py:976] (0/7) Epoch 26, batch 1250, loss[loss=0.1517, simple_loss=0.2301, pruned_loss=0.03671, over 4822.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2419, pruned_loss=0.04825, over 954975.18 frames. ], batch size: 41, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:21:45,828 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.274e+01 1.443e+02 1.675e+02 2.129e+02 3.494e+02, threshold=3.349e+02, percent-clipped=0.0 +2023-04-27 23:21:51,788 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=144472.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:22:16,355 INFO [finetune.py:976] (0/7) Epoch 26, batch 1300, loss[loss=0.1955, simple_loss=0.2603, pruned_loss=0.06529, over 4826.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2389, pruned_loss=0.04705, over 956089.02 frames. 
], batch size: 39, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:23:06,076 INFO [finetune.py:976] (0/7) Epoch 26, batch 1350, loss[loss=0.1273, simple_loss=0.2088, pruned_loss=0.02292, over 4789.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2394, pruned_loss=0.04748, over 954066.36 frames. ], batch size: 29, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:23:26,215 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.435e+01 1.590e+02 1.903e+02 2.333e+02 4.606e+02, threshold=3.805e+02, percent-clipped=5.0 +2023-04-27 23:23:32,566 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.68 vs. limit=2.0 +2023-04-27 23:23:39,995 INFO [finetune.py:976] (0/7) Epoch 26, batch 1400, loss[loss=0.1541, simple_loss=0.2081, pruned_loss=0.05003, over 4016.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2406, pruned_loss=0.04756, over 952034.11 frames. ], batch size: 17, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:23:42,552 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-27 23:24:12,864 INFO [finetune.py:976] (0/7) Epoch 26, batch 1450, loss[loss=0.2045, simple_loss=0.2722, pruned_loss=0.06843, over 4868.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2434, pruned_loss=0.04904, over 953105.05 frames. ], batch size: 34, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:24:19,407 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-27 23:24:26,559 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.57 vs. limit=2.0 +2023-04-27 23:24:33,310 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.219e+02 1.523e+02 1.914e+02 2.224e+02 4.652e+02, threshold=3.827e+02, percent-clipped=2.0 +2023-04-27 23:24:46,127 INFO [finetune.py:976] (0/7) Epoch 26, batch 1500, loss[loss=0.1473, simple_loss=0.2119, pruned_loss=0.0413, over 4485.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.2441, pruned_loss=0.04864, over 952537.40 frames. ], batch size: 19, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:24:49,922 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.62 vs. limit=5.0 +2023-04-27 23:25:20,118 INFO [finetune.py:976] (0/7) Epoch 26, batch 1550, loss[loss=0.1687, simple_loss=0.2335, pruned_loss=0.05198, over 4866.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2438, pruned_loss=0.04848, over 952991.84 frames. ], batch size: 34, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:25:42,639 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=144767.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:25:45,421 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.907e+01 1.462e+02 1.740e+02 2.100e+02 5.227e+02, threshold=3.480e+02, percent-clipped=2.0 +2023-04-27 23:26:14,645 INFO [finetune.py:976] (0/7) Epoch 26, batch 1600, loss[loss=0.1906, simple_loss=0.2423, pruned_loss=0.0694, over 4252.00 frames. ], tot_loss[loss=0.1695, simple_loss=0.2425, pruned_loss=0.04823, over 951578.24 frames. ], batch size: 65, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:26:31,980 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-27 23:27:00,873 INFO [finetune.py:976] (0/7) Epoch 26, batch 1650, loss[loss=0.2026, simple_loss=0.2722, pruned_loss=0.06646, over 4902.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2414, pruned_loss=0.04833, over 954874.36 frames. 
], batch size: 32, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:27:01,624 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4855, 1.7330, 1.9340, 2.0247, 1.9597, 1.9244, 1.9195, 1.8813], + device='cuda:0'), covar=tensor([0.3757, 0.5958, 0.4459, 0.4175, 0.5116, 0.7000, 0.5830, 0.5265], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0377, 0.0330, 0.0341, 0.0350, 0.0397, 0.0362, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-27 23:27:14,573 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-27 23:27:20,926 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.028e+02 1.459e+02 1.734e+02 2.287e+02 5.147e+02, threshold=3.469e+02, percent-clipped=2.0 +2023-04-27 23:27:25,721 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2923, 1.3131, 1.4006, 1.6218, 1.6387, 1.2915, 1.0321, 1.5152], + device='cuda:0'), covar=tensor([0.0865, 0.1359, 0.0929, 0.0595, 0.0663, 0.0808, 0.0783, 0.0577], + device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0204, 0.0187, 0.0173, 0.0178, 0.0179, 0.0152, 0.0180], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 23:27:39,743 INFO [finetune.py:976] (0/7) Epoch 26, batch 1700, loss[loss=0.1827, simple_loss=0.2512, pruned_loss=0.0571, over 4759.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2391, pruned_loss=0.0472, over 957075.90 frames. ], batch size: 54, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:28:09,682 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6713, 1.6533, 1.6832, 1.2630, 1.8435, 1.4156, 2.3191, 1.5136], + device='cuda:0'), covar=tensor([0.3625, 0.1938, 0.4630, 0.2811, 0.1555, 0.2355, 0.1412, 0.4452], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0349, 0.0423, 0.0349, 0.0380, 0.0373, 0.0363, 0.0420], + device='cuda:0'), out_proj_covar=tensor([9.9213e-05, 1.0407e-04, 1.2797e-04, 1.0459e-04, 1.1287e-04, 1.1081e-04, + 1.0633e-04, 1.2641e-04], device='cuda:0') +2023-04-27 23:28:44,465 INFO [finetune.py:976] (0/7) Epoch 26, batch 1750, loss[loss=0.1885, simple_loss=0.2645, pruned_loss=0.05626, over 4850.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2417, pruned_loss=0.04795, over 957358.31 frames. ], batch size: 49, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:29:25,331 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.536e+02 1.830e+02 2.200e+02 7.306e+02, threshold=3.661e+02, percent-clipped=1.0 +2023-04-27 23:29:25,469 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7133, 2.3234, 1.9669, 1.8807, 1.2366, 1.3636, 2.0836, 1.2627], + device='cuda:0'), covar=tensor([0.1674, 0.1461, 0.1359, 0.1596, 0.2355, 0.1955, 0.0832, 0.2087], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0209, 0.0169, 0.0204, 0.0201, 0.0185, 0.0156, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 23:29:50,039 INFO [finetune.py:976] (0/7) Epoch 26, batch 1800, loss[loss=0.1571, simple_loss=0.233, pruned_loss=0.0406, over 4880.00 frames. ], tot_loss[loss=0.1721, simple_loss=0.2457, pruned_loss=0.04926, over 957398.61 frames. 
], batch size: 32, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:29:53,360 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=144996.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:30:15,071 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145027.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:30:24,557 INFO [finetune.py:976] (0/7) Epoch 26, batch 1850, loss[loss=0.1865, simple_loss=0.2549, pruned_loss=0.05911, over 4795.00 frames. ], tot_loss[loss=0.1713, simple_loss=0.2448, pruned_loss=0.04885, over 955608.65 frames. ], batch size: 45, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:30:34,393 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145057.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 23:30:41,230 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145066.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:30:41,816 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145067.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:30:44,632 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.003e+02 1.513e+02 1.816e+02 2.184e+02 4.128e+02, threshold=3.632e+02, percent-clipped=2.0 +2023-04-27 23:30:55,902 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145088.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:30:58,303 INFO [finetune.py:976] (0/7) Epoch 26, batch 1900, loss[loss=0.1884, simple_loss=0.2613, pruned_loss=0.0577, over 4888.00 frames. ], tot_loss[loss=0.1704, simple_loss=0.2445, pruned_loss=0.04814, over 957550.87 frames. ], batch size: 43, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:31:13,796 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=145115.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:31:15,647 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5054, 1.9966, 2.3862, 2.9837, 2.3784, 1.9218, 1.9130, 2.2733], + device='cuda:0'), covar=tensor([0.2722, 0.3028, 0.1595, 0.2077, 0.2512, 0.2412, 0.3550, 0.2018], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0244, 0.0226, 0.0312, 0.0220, 0.0234, 0.0225, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-27 23:31:22,155 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145127.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:31:32,057 INFO [finetune.py:976] (0/7) Epoch 26, batch 1950, loss[loss=0.1691, simple_loss=0.2235, pruned_loss=0.05732, over 4847.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2442, pruned_loss=0.04867, over 956132.12 frames. 
], batch size: 49, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:31:40,587 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1231, 2.5613, 1.0324, 1.5172, 1.9881, 1.1950, 3.3922, 1.7341], + device='cuda:0'), covar=tensor([0.0675, 0.0625, 0.0836, 0.1148, 0.0493, 0.1002, 0.0222, 0.0622], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 23:32:03,279 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.496e+01 1.516e+02 2.001e+02 2.404e+02 4.376e+02, threshold=4.002e+02, percent-clipped=4.0 +2023-04-27 23:32:23,670 INFO [finetune.py:976] (0/7) Epoch 26, batch 2000, loss[loss=0.178, simple_loss=0.2528, pruned_loss=0.05156, over 4828.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2409, pruned_loss=0.04798, over 955543.34 frames. ], batch size: 39, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:32:48,364 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-27 23:33:03,265 INFO [finetune.py:976] (0/7) Epoch 26, batch 2050, loss[loss=0.1503, simple_loss=0.2264, pruned_loss=0.03711, over 4914.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2376, pruned_loss=0.04669, over 955454.18 frames. ], batch size: 36, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:33:09,549 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0 +2023-04-27 23:33:43,224 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.513e+02 1.827e+02 2.273e+02 3.950e+02, threshold=3.653e+02, percent-clipped=0.0 +2023-04-27 23:33:58,463 INFO [finetune.py:976] (0/7) Epoch 26, batch 2100, loss[loss=0.2101, simple_loss=0.2789, pruned_loss=0.07066, over 4857.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2374, pruned_loss=0.04693, over 955428.90 frames. ], batch size: 49, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:34:03,312 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2730, 3.4106, 0.7986, 1.6106, 1.4816, 2.1840, 1.8515, 1.1196], + device='cuda:0'), covar=tensor([0.1904, 0.1483, 0.2491, 0.1832, 0.1414, 0.1474, 0.1782, 0.2201], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0238, 0.0135, 0.0120, 0.0131, 0.0152, 0.0117, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-27 23:34:12,987 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.60 vs. limit=2.0 +2023-04-27 23:34:32,514 INFO [finetune.py:976] (0/7) Epoch 26, batch 2150, loss[loss=0.1287, simple_loss=0.2131, pruned_loss=0.02212, over 4784.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2392, pruned_loss=0.0469, over 954105.00 frames. 
], batch size: 29, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:34:39,178 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145352.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 23:34:51,070 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.081e+02 1.597e+02 2.010e+02 2.319e+02 4.178e+02, threshold=4.020e+02, percent-clipped=2.0 +2023-04-27 23:34:51,181 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145371.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:34:59,915 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145383.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:35:10,686 INFO [finetune.py:976] (0/7) Epoch 26, batch 2200, loss[loss=0.1866, simple_loss=0.2694, pruned_loss=0.05191, over 4809.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2426, pruned_loss=0.04755, over 956057.75 frames. ], batch size: 40, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:35:24,184 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0 +2023-04-27 23:35:30,056 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145422.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:35:37,096 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145432.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:35:37,794 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.32 vs. limit=2.0 +2023-04-27 23:35:44,057 INFO [finetune.py:976] (0/7) Epoch 26, batch 2250, loss[loss=0.1801, simple_loss=0.2505, pruned_loss=0.05486, over 4832.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2427, pruned_loss=0.04779, over 953951.09 frames. ], batch size: 49, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:36:03,178 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.017e+02 1.640e+02 1.880e+02 2.244e+02 3.153e+02, threshold=3.761e+02, percent-clipped=0.0 +2023-04-27 23:36:17,802 INFO [finetune.py:976] (0/7) Epoch 26, batch 2300, loss[loss=0.1843, simple_loss=0.246, pruned_loss=0.06135, over 4811.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2423, pruned_loss=0.04708, over 954747.96 frames. ], batch size: 39, lr: 2.97e-03, grad_scale: 32.0 +2023-04-27 23:36:19,159 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9578, 1.9705, 1.8901, 1.6206, 1.9806, 1.6766, 2.5992, 1.4959], + device='cuda:0'), covar=tensor([0.3444, 0.1715, 0.3936, 0.2740, 0.1901, 0.2514, 0.1328, 0.4392], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0354, 0.0429, 0.0354, 0.0387, 0.0377, 0.0371, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 23:36:26,368 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0 +2023-04-27 23:36:51,030 INFO [finetune.py:976] (0/7) Epoch 26, batch 2350, loss[loss=0.1674, simple_loss=0.2379, pruned_loss=0.04845, over 4827.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2405, pruned_loss=0.04654, over 954565.82 frames. 
], batch size: 39, lr: 2.96e-03, grad_scale: 32.0 +2023-04-27 23:37:06,532 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5844, 3.0430, 1.0209, 1.7730, 2.4212, 1.4973, 4.1942, 2.0593], + device='cuda:0'), covar=tensor([0.0578, 0.0901, 0.0896, 0.1193, 0.0467, 0.0951, 0.0189, 0.0562], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-27 23:37:10,060 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.053e+02 1.548e+02 1.895e+02 2.111e+02 3.968e+02, threshold=3.791e+02, percent-clipped=2.0 +2023-04-27 23:37:33,057 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9485, 2.4311, 1.8959, 1.8344, 1.4528, 1.4711, 1.9906, 1.3975], + device='cuda:0'), covar=tensor([0.1492, 0.1232, 0.1300, 0.1562, 0.2027, 0.1805, 0.0881, 0.1894], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0211, 0.0169, 0.0203, 0.0200, 0.0186, 0.0157, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 23:37:40,488 INFO [finetune.py:976] (0/7) Epoch 26, batch 2400, loss[loss=0.1845, simple_loss=0.2526, pruned_loss=0.05816, over 4846.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2379, pruned_loss=0.04626, over 955372.05 frames. ], batch size: 47, lr: 2.96e-03, grad_scale: 32.0 +2023-04-27 23:37:41,804 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145594.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:38:17,098 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0799, 1.7772, 2.2847, 2.3662, 2.1205, 2.0387, 2.1808, 2.1646], + device='cuda:0'), covar=tensor([0.4947, 0.7688, 0.7011, 0.6293, 0.6504, 0.9209, 0.9255, 1.0824], + device='cuda:0'), in_proj_covar=tensor([0.0440, 0.0422, 0.0516, 0.0507, 0.0470, 0.0504, 0.0508, 0.0519], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-27 23:38:46,683 INFO [finetune.py:976] (0/7) Epoch 26, batch 2450, loss[loss=0.1599, simple_loss=0.2358, pruned_loss=0.04197, over 4871.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2358, pruned_loss=0.04559, over 955887.75 frames. ], batch size: 31, lr: 2.96e-03, grad_scale: 32.0 +2023-04-27 23:38:47,939 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145643.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:39:00,515 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145652.0, num_to_drop=1, layers_to_drop={2} +2023-04-27 23:39:07,850 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145655.0, num_to_drop=1, layers_to_drop={1} +2023-04-27 23:39:30,139 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.495e+02 1.759e+02 2.001e+02 3.629e+02, threshold=3.517e+02, percent-clipped=0.0 +2023-04-27 23:39:35,250 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.51 vs. limit=5.0 +2023-04-27 23:39:43,008 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145683.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:39:48,404 INFO [finetune.py:976] (0/7) Epoch 26, batch 2500, loss[loss=0.1516, simple_loss=0.2347, pruned_loss=0.03431, over 4896.00 frames. 
], tot_loss[loss=0.1637, simple_loss=0.2361, pruned_loss=0.04569, over 956108.09 frames. ], batch size: 32, lr: 2.96e-03, grad_scale: 32.0 +2023-04-27 23:39:49,122 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6485, 1.7063, 0.7542, 1.3073, 1.8636, 1.4807, 1.3753, 1.4846], + device='cuda:0'), covar=tensor([0.0461, 0.0363, 0.0330, 0.0559, 0.0247, 0.0503, 0.0479, 0.0537], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0038, 0.0053, 0.0039, 0.0050, 0.0050, 0.0052], + device='cuda:0') +2023-04-27 23:39:54,790 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=145700.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:39:57,792 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145704.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:40:07,862 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8176, 2.5172, 1.8907, 1.7929, 1.3280, 1.3825, 2.0515, 1.3223], + device='cuda:0'), covar=tensor([0.1634, 0.1259, 0.1311, 0.1669, 0.2239, 0.1920, 0.0925, 0.1997], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0211, 0.0170, 0.0204, 0.0201, 0.0187, 0.0157, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-27 23:40:09,643 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=145722.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:40:12,706 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145727.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:40:15,130 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=145731.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:40:22,234 INFO [finetune.py:976] (0/7) Epoch 26, batch 2550, loss[loss=0.1489, simple_loss=0.217, pruned_loss=0.0404, over 4704.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2379, pruned_loss=0.04586, over 954632.11 frames. ], batch size: 23, lr: 2.96e-03, grad_scale: 32.0 +2023-04-27 23:40:41,727 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=145770.0, num_to_drop=0, layers_to_drop=set() +2023-04-27 23:40:42,274 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.201e+01 1.571e+02 1.844e+02 2.174e+02 6.257e+02, threshold=3.689e+02, percent-clipped=2.0 +2023-04-27 23:40:56,104 INFO [finetune.py:976] (0/7) Epoch 26, batch 2600, loss[loss=0.1421, simple_loss=0.2178, pruned_loss=0.03324, over 4884.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2414, pruned_loss=0.04707, over 955110.63 frames. 
], batch size: 32, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:41:06,742 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6244, 3.5782, 1.1048, 1.8617, 1.9467, 2.5224, 2.0061, 1.0193],
+ device='cuda:0'), covar=tensor([0.1376, 0.1026, 0.1961, 0.1312, 0.1051, 0.1075, 0.1529, 0.1935],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0236, 0.0135, 0.0120, 0.0130, 0.0151, 0.0116, 0.0117],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 23:41:16,299 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=145821.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 23:41:29,915 INFO [finetune.py:976] (0/7) Epoch 26, batch 2650, loss[loss=0.1614, simple_loss=0.2509, pruned_loss=0.03594, over 4774.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2428, pruned_loss=0.04762, over 954172.06 frames. ], batch size: 28, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:41:47,057 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.49 vs. limit=5.0
+2023-04-27 23:41:49,916 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.145e+02 1.525e+02 1.770e+02 2.195e+02 3.362e+02, threshold=3.540e+02, percent-clipped=0.0
+2023-04-27 23:41:57,240 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=145882.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 23:42:01,573 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0
+2023-04-27 23:42:03,155 INFO [finetune.py:976] (0/7) Epoch 26, batch 2700, loss[loss=0.1541, simple_loss=0.2258, pruned_loss=0.04114, over 4921.00 frames. ], tot_loss[loss=0.168, simple_loss=0.242, pruned_loss=0.04701, over 954166.01 frames. ], batch size: 38, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:42:36,792 INFO [finetune.py:976] (0/7) Epoch 26, batch 2750, loss[loss=0.1711, simple_loss=0.2373, pruned_loss=0.0524, over 4900.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2388, pruned_loss=0.04603, over 955281.68 frames. ], batch size: 32, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:42:42,209 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145950.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 23:42:56,798 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.020e+02 1.527e+02 1.797e+02 2.320e+02 4.826e+02, threshold=3.594e+02, percent-clipped=4.0
+2023-04-27 23:43:03,795 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.72 vs. limit=2.0
+2023-04-27 23:43:10,065 INFO [finetune.py:976] (0/7) Epoch 26, batch 2800, loss[loss=0.1654, simple_loss=0.2281, pruned_loss=0.0514, over 4901.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2366, pruned_loss=0.04557, over 956896.23 frames. ], batch size: 32, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:43:14,389 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=145999.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:43:15,085 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-146000.pt
+2023-04-27 23:43:29,610 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.92 vs. limit=2.0
+2023-04-27 23:43:35,188 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146027.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:43:44,745 INFO [finetune.py:976] (0/7) Epoch 26, batch 2850, loss[loss=0.1867, simple_loss=0.2474, pruned_loss=0.06299, over 4721.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.236, pruned_loss=0.04542, over 956829.52 frames. ], batch size: 23, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:44:03,316 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0
+2023-04-27 23:44:22,215 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.964e+01 1.598e+02 1.831e+02 2.098e+02 3.575e+02, threshold=3.662e+02, percent-clipped=0.0
+2023-04-27 23:44:25,238 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=146075.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:44:43,959 INFO [finetune.py:976] (0/7) Epoch 26, batch 2900, loss[loss=0.1448, simple_loss=0.2182, pruned_loss=0.03568, over 4787.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2374, pruned_loss=0.04571, over 957567.26 frames. ], batch size: 29, lr: 2.96e-03, grad_scale: 64.0
+2023-04-27 23:45:49,355 INFO [finetune.py:976] (0/7) Epoch 26, batch 2950, loss[loss=0.1551, simple_loss=0.2324, pruned_loss=0.03885, over 4818.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2399, pruned_loss=0.04588, over 959414.55 frames. ], batch size: 39, lr: 2.96e-03, grad_scale: 64.0
+2023-04-27 23:45:59,489 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146149.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:46:11,510 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0665, 2.3781, 2.0996, 2.3733, 1.6794, 2.1046, 2.1990, 1.8827],
+ device='cuda:0'), covar=tensor([0.1577, 0.0968, 0.0685, 0.0954, 0.2973, 0.0880, 0.1428, 0.1893],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0299, 0.0215, 0.0276, 0.0314, 0.0254, 0.0248, 0.0263],
+ device='cuda:0'), out_proj_covar=tensor([1.1373e-04, 1.1792e-04, 8.4636e-05, 1.0860e-04, 1.2657e-04, 9.9862e-05,
+ 9.9947e-05, 1.0382e-04], device='cuda:0')
+2023-04-27 23:46:13,680 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.156e+02 1.553e+02 1.780e+02 2.193e+02 5.128e+02, threshold=3.559e+02, percent-clipped=3.0
+2023-04-27 23:46:17,216 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1255, 2.5619, 2.1118, 2.5201, 1.7365, 2.2307, 2.2063, 1.7048],
+ device='cuda:0'), covar=tensor([0.2133, 0.1316, 0.0820, 0.1056, 0.3267, 0.1081, 0.2006, 0.2665],
+ device='cuda:0'), in_proj_covar=tensor([0.0285, 0.0299, 0.0215, 0.0276, 0.0314, 0.0254, 0.0248, 0.0263],
+ device='cuda:0'), out_proj_covar=tensor([1.1368e-04, 1.1784e-04, 8.4600e-05, 1.0853e-04, 1.2649e-04, 9.9761e-05,
+ 9.9895e-05, 1.0372e-04], device='cuda:0')
+2023-04-27 23:46:18,392 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146177.0, num_to_drop=1, layers_to_drop={3}
+2023-04-27 23:46:28,438 INFO [finetune.py:976] (0/7) Epoch 26, batch 3000, loss[loss=0.1723, simple_loss=0.2462, pruned_loss=0.04925, over 4921.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2404, pruned_loss=0.04559, over 958507.13 frames. ], batch size: 33, lr: 2.96e-03, grad_scale: 64.0
+2023-04-27 23:46:28,439 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-27 23:46:38,927 INFO [finetune.py:1010] (0/7) Epoch 26, validation: loss=0.1526, simple_loss=0.2216, pruned_loss=0.04183, over 2265189.00 frames.
+2023-04-27 23:46:38,927 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-27 23:46:50,698 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146210.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:46:53,211 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.53 vs. limit=2.0
+2023-04-27 23:47:11,580 INFO [finetune.py:976] (0/7) Epoch 26, batch 3050, loss[loss=0.1326, simple_loss=0.2053, pruned_loss=0.02993, over 4697.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2416, pruned_loss=0.04585, over 958347.24 frames. ], batch size: 23, lr: 2.96e-03, grad_scale: 64.0
+2023-04-27 23:47:17,397 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146250.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:47:27,536 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1091, 2.4892, 2.0747, 2.3801, 1.7445, 2.1925, 2.1590, 1.6135],
+ device='cuda:0'), covar=tensor([0.2059, 0.1266, 0.0917, 0.1219, 0.3276, 0.1071, 0.2008, 0.2692],
+ device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0299, 0.0215, 0.0275, 0.0313, 0.0253, 0.0247, 0.0263],
+ device='cuda:0'), out_proj_covar=tensor([1.1342e-04, 1.1760e-04, 8.4519e-05, 1.0844e-04, 1.2605e-04, 9.9637e-05,
+ 9.9717e-05, 1.0362e-04], device='cuda:0')
+2023-04-27 23:47:31,020 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.111e+02 1.541e+02 1.745e+02 2.066e+02 6.833e+02, threshold=3.490e+02, percent-clipped=2.0
+2023-04-27 23:47:45,143 INFO [finetune.py:976] (0/7) Epoch 26, batch 3100, loss[loss=0.1647, simple_loss=0.2358, pruned_loss=0.0468, over 4904.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2405, pruned_loss=0.04611, over 956305.92 frames. ], batch size: 36, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:47:49,793 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=146298.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:47:50,448 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146299.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:48:18,966 INFO [finetune.py:976] (0/7) Epoch 26, batch 3150, loss[loss=0.1522, simple_loss=0.2226, pruned_loss=0.04084, over 4738.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2388, pruned_loss=0.0461, over 955952.24 frames. ], batch size: 59, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:48:21,499 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6629, 1.6791, 1.7956, 1.9811, 2.0248, 1.6096, 1.2446, 1.8721],
+ device='cuda:0'), covar=tensor([0.0761, 0.1062, 0.0692, 0.0519, 0.0611, 0.0824, 0.0761, 0.0526],
+ device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0202, 0.0184, 0.0170, 0.0176, 0.0177, 0.0149, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 23:48:22,565 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=146347.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:48:26,674 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1585, 1.4498, 1.2788, 1.8797, 1.6210, 1.7474, 1.4278, 3.3810],
+ device='cuda:0'), covar=tensor([0.0641, 0.0839, 0.0886, 0.1129, 0.0686, 0.0473, 0.0771, 0.0191],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0054],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-27 23:48:38,469 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.566e+02 1.850e+02 2.179e+02 3.570e+02, threshold=3.699e+02, percent-clipped=2.0
+2023-04-27 23:48:51,066 INFO [finetune.py:976] (0/7) Epoch 26, batch 3200, loss[loss=0.1586, simple_loss=0.2319, pruned_loss=0.04262, over 4789.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2361, pruned_loss=0.04554, over 956060.39 frames. ], batch size: 29, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:48:51,288 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.66 vs. limit=2.0
+2023-04-27 23:48:59,738 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.6835, 3.6961, 2.8258, 4.2607, 3.7036, 3.7424, 1.8917, 3.6708],
+ device='cuda:0'), covar=tensor([0.1832, 0.1144, 0.2926, 0.1837, 0.2620, 0.1755, 0.5417, 0.2298],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0220, 0.0253, 0.0306, 0.0301, 0.0247, 0.0275, 0.0274],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-27 23:49:05,529 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7248, 0.7372, 1.5797, 2.0821, 1.7817, 1.6013, 1.6469, 1.5836],
+ device='cuda:0'), covar=tensor([0.4066, 0.5890, 0.5606, 0.5098, 0.5225, 0.6912, 0.6287, 0.7706],
+ device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0422, 0.0515, 0.0507, 0.0470, 0.0506, 0.0506, 0.0518],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 23:49:24,698 INFO [finetune.py:976] (0/7) Epoch 26, batch 3250, loss[loss=0.2045, simple_loss=0.2711, pruned_loss=0.06896, over 4145.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2363, pruned_loss=0.04545, over 954693.77 frames. ], batch size: 65, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:49:34,858 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2618, 1.7302, 2.2343, 2.6595, 2.1489, 1.6887, 1.4045, 1.9789],
+ device='cuda:0'), covar=tensor([0.3006, 0.2885, 0.1608, 0.2132, 0.2508, 0.2454, 0.3981, 0.2014],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0244, 0.0226, 0.0311, 0.0220, 0.0233, 0.0226, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 23:49:38,410 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.53 vs. limit=2.0
+2023-04-27 23:49:45,355 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.653e+01 1.477e+02 1.784e+02 2.151e+02 4.577e+02, threshold=3.568e+02, percent-clipped=2.0
+2023-04-27 23:49:53,883 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146477.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 23:50:13,001 INFO [finetune.py:976] (0/7) Epoch 26, batch 3300, loss[loss=0.1306, simple_loss=0.2093, pruned_loss=0.02592, over 4705.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2382, pruned_loss=0.04585, over 953119.33 frames. ], batch size: 23, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:50:28,261 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146505.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:50:57,307 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=146525.0, num_to_drop=1, layers_to_drop={1}
+2023-04-27 23:51:19,417 INFO [finetune.py:976] (0/7) Epoch 26, batch 3350, loss[loss=0.1595, simple_loss=0.2374, pruned_loss=0.04079, over 4788.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2418, pruned_loss=0.04704, over 952484.74 frames. ], batch size: 51, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:51:36,937 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0903, 2.6314, 1.0841, 1.4698, 2.0522, 1.2310, 3.2594, 1.5983],
+ device='cuda:0'), covar=tensor([0.0665, 0.0800, 0.0857, 0.1143, 0.0482, 0.0955, 0.0227, 0.0642],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0073, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-27 23:51:45,685 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.686e+01 1.611e+02 1.865e+02 2.186e+02 4.215e+02, threshold=3.729e+02, percent-clipped=3.0
+2023-04-27 23:51:57,878 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146591.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:51:58,396 INFO [finetune.py:976] (0/7) Epoch 26, batch 3400, loss[loss=0.1972, simple_loss=0.264, pruned_loss=0.06521, over 4894.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2436, pruned_loss=0.04759, over 952942.05 frames. ], batch size: 36, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:52:27,922 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146626.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:52:43,201 INFO [finetune.py:976] (0/7) Epoch 26, batch 3450, loss[loss=0.1577, simple_loss=0.2256, pruned_loss=0.04492, over 4873.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2425, pruned_loss=0.04683, over 953870.10 frames. ], batch size: 31, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:52:49,801 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146652.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:53:04,800 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.587e+02 1.818e+02 2.131e+02 4.084e+02, threshold=3.635e+02, percent-clipped=1.0
+2023-04-27 23:53:05,590 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-04-27 23:53:13,331 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1154, 1.2224, 4.9480, 4.6218, 4.1786, 4.7517, 4.4343, 4.3212],
+ device='cuda:0'), covar=tensor([0.6611, 0.6845, 0.0974, 0.1897, 0.1272, 0.1051, 0.1250, 0.1679],
+ device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0309, 0.0409, 0.0410, 0.0349, 0.0414, 0.0319, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-27 23:53:13,984 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146687.0, num_to_drop=1, layers_to_drop={2}
+2023-04-27 23:53:16,859 INFO [finetune.py:976] (0/7) Epoch 26, batch 3500, loss[loss=0.1618, simple_loss=0.235, pruned_loss=0.04425, over 4758.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2403, pruned_loss=0.04637, over 954482.03 frames. ], batch size: 28, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:53:16,996 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7000, 2.2452, 2.6655, 3.2818, 2.5099, 2.0488, 2.2599, 2.5099],
+ device='cuda:0'), covar=tensor([0.3253, 0.3006, 0.1615, 0.2165, 0.2724, 0.2711, 0.3351, 0.1990],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0243, 0.0226, 0.0311, 0.0219, 0.0233, 0.0225, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-27 23:53:24,968 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.91 vs. limit=2.0
+2023-04-27 23:53:50,646 INFO [finetune.py:976] (0/7) Epoch 26, batch 3550, loss[loss=0.1603, simple_loss=0.2351, pruned_loss=0.04271, over 4907.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2371, pruned_loss=0.04553, over 955312.56 frames. ], batch size: 36, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:54:11,377 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.892e+01 1.463e+02 1.726e+02 2.142e+02 5.887e+02, threshold=3.452e+02, percent-clipped=4.0
+2023-04-27 23:54:24,571 INFO [finetune.py:976] (0/7) Epoch 26, batch 3600, loss[loss=0.1416, simple_loss=0.2251, pruned_loss=0.0291, over 4858.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2345, pruned_loss=0.04475, over 954240.68 frames. ], batch size: 44, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:54:32,794 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=146805.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:54:53,214 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.53 vs. limit=5.0
+2023-04-27 23:54:59,555 INFO [finetune.py:976] (0/7) Epoch 26, batch 3650, loss[loss=0.1806, simple_loss=0.254, pruned_loss=0.05364, over 4920.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2358, pruned_loss=0.04487, over 955863.62 frames. ], batch size: 42, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:55:00,923 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=146844.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:55:11,055 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6066, 1.5822, 0.7418, 1.3088, 1.6655, 1.4828, 1.3823, 1.3803],
+ device='cuda:0'), covar=tensor([0.0499, 0.0378, 0.0362, 0.0583, 0.0286, 0.0539, 0.0528, 0.0601],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0038, 0.0053, 0.0039, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 23:55:11,623 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=146853.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:55:22,449 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0520, 1.6159, 1.3959, 1.9564, 2.0946, 1.7942, 1.7572, 1.4138],
+ device='cuda:0'), covar=tensor([0.1977, 0.1660, 0.1776, 0.1557, 0.1486, 0.1957, 0.2266, 0.2488],
+ device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0314, 0.0357, 0.0290, 0.0332, 0.0308, 0.0303, 0.0379],
+ device='cuda:0'), out_proj_covar=tensor([6.5210e-05, 6.4586e-05, 7.5009e-05, 5.8103e-05, 6.7933e-05, 6.4340e-05,
+ 6.2700e-05, 8.0398e-05], device='cuda:0')
+2023-04-27 23:55:30,101 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.832e+01 1.619e+02 1.915e+02 2.507e+02 7.747e+02, threshold=3.830e+02, percent-clipped=2.0
+2023-04-27 23:55:36,011 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5193, 1.9675, 1.7651, 2.3697, 2.4868, 2.0460, 2.0882, 1.7318],
+ device='cuda:0'), covar=tensor([0.1592, 0.1677, 0.1926, 0.1437, 0.1069, 0.1971, 0.2056, 0.2525],
+ device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0315, 0.0357, 0.0291, 0.0332, 0.0309, 0.0303, 0.0380],
+ device='cuda:0'), out_proj_covar=tensor([6.5352e-05, 6.4729e-05, 7.5155e-05, 5.8205e-05, 6.8084e-05, 6.4442e-05,
+ 6.2827e-05, 8.0554e-05], device='cuda:0')
+2023-04-27 23:55:48,369 INFO [finetune.py:976] (0/7) Epoch 26, batch 3700, loss[loss=0.183, simple_loss=0.258, pruned_loss=0.05394, over 4812.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2382, pruned_loss=0.04602, over 953586.76 frames. ], batch size: 38, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:56:01,839 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=146905.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:56:07,502 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1513, 2.7309, 2.1015, 2.2603, 1.5549, 1.5639, 2.2640, 1.4723],
+ device='cuda:0'), covar=tensor([0.1427, 0.1190, 0.1209, 0.1314, 0.2046, 0.1745, 0.0840, 0.1876],
+ device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0211, 0.0170, 0.0204, 0.0200, 0.0186, 0.0157, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 23:56:36,153 INFO [finetune.py:976] (0/7) Epoch 26, batch 3750, loss[loss=0.1572, simple_loss=0.231, pruned_loss=0.04175, over 4904.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2409, pruned_loss=0.04759, over 952219.67 frames. ], batch size: 46, lr: 2.96e-03, grad_scale: 32.0
+2023-04-27 23:56:44,462 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146947.0, num_to_drop=0, layers_to_drop=set()
+2023-04-27 23:57:06,090 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.923e+01 1.552e+02 1.944e+02 2.307e+02 4.306e+02, threshold=3.888e+02, percent-clipped=2.0
+2023-04-27 23:57:13,808 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=146982.0, num_to_drop=1, layers_to_drop={0}
+2023-04-27 23:57:13,909 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.23 vs. limit=5.0
+2023-04-27 23:57:20,816 INFO [finetune.py:976] (0/7) Epoch 26, batch 3800, loss[loss=0.1694, simple_loss=0.2484, pruned_loss=0.04521, over 4919.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2422, pruned_loss=0.04817, over 953892.21 frames. ], batch size: 33, lr: 2.95e-03, grad_scale: 32.0
+2023-04-27 23:58:02,624 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2123, 2.7326, 2.1910, 2.1983, 1.6171, 1.5263, 2.3414, 1.6381],
+ device='cuda:0'), covar=tensor([0.1638, 0.1390, 0.1303, 0.1603, 0.2298, 0.1875, 0.0929, 0.1990],
+ device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0212, 0.0170, 0.0204, 0.0200, 0.0187, 0.0157, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 23:58:05,507 INFO [finetune.py:976] (0/7) Epoch 26, batch 3850, loss[loss=0.1445, simple_loss=0.2122, pruned_loss=0.03845, over 4938.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2414, pruned_loss=0.04801, over 953667.92 frames. ], batch size: 33, lr: 2.95e-03, grad_scale: 32.0
+2023-04-27 23:58:14,195 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6053, 1.7977, 0.8807, 1.3350, 1.7631, 1.5068, 1.4267, 1.4521],
+ device='cuda:0'), covar=tensor([0.0465, 0.0339, 0.0324, 0.0539, 0.0259, 0.0485, 0.0462, 0.0537],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0038, 0.0053, 0.0039, 0.0050, 0.0050, 0.0052],
+ device='cuda:0')
+2023-04-27 23:58:18,468 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1863, 4.3908, 0.8719, 2.1009, 2.6690, 2.8118, 2.5044, 0.8945],
+ device='cuda:0'), covar=tensor([0.1193, 0.0673, 0.2111, 0.1257, 0.0867, 0.1038, 0.1379, 0.2095],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0239, 0.0137, 0.0122, 0.0132, 0.0154, 0.0118, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-27 23:58:23,781 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.597e+02 1.804e+02 2.116e+02 3.825e+02, threshold=3.609e+02, percent-clipped=0.0
+2023-04-27 23:58:39,180 INFO [finetune.py:976] (0/7) Epoch 26, batch 3900, loss[loss=0.1533, simple_loss=0.2291, pruned_loss=0.03868, over 4910.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.239, pruned_loss=0.04707, over 954985.88 frames. ], batch size: 35, lr: 2.95e-03, grad_scale: 32.0
+2023-04-27 23:58:50,317 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7834, 2.0772, 1.7288, 1.5291, 1.3638, 1.3432, 1.7563, 1.3012],
+ device='cuda:0'), covar=tensor([0.1656, 0.1256, 0.1374, 0.1635, 0.2197, 0.1983, 0.1010, 0.2046],
+ device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0212, 0.0170, 0.0205, 0.0201, 0.0187, 0.0157, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-27 23:59:12,140 INFO [finetune.py:976] (0/7) Epoch 26, batch 3950, loss[loss=0.1658, simple_loss=0.2305, pruned_loss=0.05058, over 4093.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2365, pruned_loss=0.0462, over 956130.43 frames. ], batch size: 17, lr: 2.95e-03, grad_scale: 32.0
+2023-04-27 23:59:31,440 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.029e+02 1.596e+02 1.912e+02 2.259e+02 3.879e+02, threshold=3.825e+02, percent-clipped=1.0
+2023-04-27 23:59:43,953 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.74 vs. limit=5.0
+2023-04-27 23:59:45,574 INFO [finetune.py:976] (0/7) Epoch 26, batch 4000, loss[loss=0.1284, simple_loss=0.2066, pruned_loss=0.02509, over 4708.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2358, pruned_loss=0.04622, over 951600.89 frames. ], batch size: 23, lr: 2.95e-03, grad_scale: 32.0
+2023-04-27 23:59:51,634 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147200.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:00:18,669 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147241.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:00:19,197 INFO [finetune.py:976] (0/7) Epoch 26, batch 4050, loss[loss=0.1892, simple_loss=0.2658, pruned_loss=0.05629, over 4908.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2403, pruned_loss=0.048, over 951418.89 frames. ], batch size: 36, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:00:23,241 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147247.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:00:44,478 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.656e+02 1.880e+02 2.300e+02 3.890e+02, threshold=3.760e+02, percent-clipped=1.0
+2023-04-28 00:00:55,640 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147282.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:01:10,421 INFO [finetune.py:976] (0/7) Epoch 26, batch 4100, loss[loss=0.1652, simple_loss=0.2464, pruned_loss=0.04203, over 4816.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.242, pruned_loss=0.04794, over 953515.98 frames. ], batch size: 45, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:01:12,315 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=147295.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:01:14,682 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2252, 1.6297, 2.0838, 2.5711, 2.2302, 1.6890, 1.7980, 1.9703],
+ device='cuda:0'), covar=tensor([0.3322, 0.3676, 0.1744, 0.2878, 0.2566, 0.2701, 0.4039, 0.2247],
+ device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0247, 0.0229, 0.0315, 0.0223, 0.0236, 0.0229, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 00:01:23,315 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147302.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:01:34,361 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147310.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:01:54,619 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=147330.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:02:13,693 INFO [finetune.py:976] (0/7) Epoch 26, batch 4150, loss[loss=0.2052, simple_loss=0.2791, pruned_loss=0.06564, over 4924.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2431, pruned_loss=0.04824, over 954118.20 frames. ], batch size: 38, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:02:24,132 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0
+2023-04-28 00:02:56,073 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147371.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:02:56,541 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.010e+02 1.642e+02 1.885e+02 2.342e+02 4.093e+02, threshold=3.770e+02, percent-clipped=2.0
+2023-04-28 00:03:09,328 INFO [finetune.py:976] (0/7) Epoch 26, batch 4200, loss[loss=0.1613, simple_loss=0.234, pruned_loss=0.04428, over 4798.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2419, pruned_loss=0.04737, over 954792.31 frames. ], batch size: 45, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:03:21,129 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147407.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:03:31,822 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7391, 1.1792, 1.7963, 2.2142, 1.8066, 1.7053, 1.7419, 1.7358],
+ device='cuda:0'), covar=tensor([0.4085, 0.6689, 0.5638, 0.5253, 0.5693, 0.7361, 0.7620, 0.8706],
+ device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0421, 0.0514, 0.0506, 0.0468, 0.0503, 0.0506, 0.0518],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 00:03:33,154 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0
+2023-04-28 00:03:37,885 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147434.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:03:42,686 INFO [finetune.py:976] (0/7) Epoch 26, batch 4250, loss[loss=0.1503, simple_loss=0.2301, pruned_loss=0.03526, over 4824.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2403, pruned_loss=0.04683, over 956060.99 frames. ], batch size: 40, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:04:01,799 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147468.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:04:04,153 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.178e+01 1.415e+02 1.781e+02 2.171e+02 8.734e+02, threshold=3.562e+02, percent-clipped=6.0
+2023-04-28 00:04:07,419 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.89 vs. limit=5.0
+2023-04-28 00:04:16,308 INFO [finetune.py:976] (0/7) Epoch 26, batch 4300, loss[loss=0.1254, simple_loss=0.1936, pruned_loss=0.02858, over 4723.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2375, pruned_loss=0.04633, over 955968.84 frames. ], batch size: 54, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:04:17,768 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-04-28 00:04:18,249 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147495.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:04:22,182 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147500.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:04:49,577 INFO [finetune.py:976] (0/7) Epoch 26, batch 4350, loss[loss=0.1592, simple_loss=0.2169, pruned_loss=0.05081, over 4820.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2351, pruned_loss=0.04547, over 957109.26 frames. ], batch size: 30, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:04:53,339 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=147548.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:05:10,810 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.116e+01 1.521e+02 1.686e+02 2.044e+02 4.229e+02, threshold=3.372e+02, percent-clipped=1.0
+2023-04-28 00:05:22,606 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0
+2023-04-28 00:05:23,015 INFO [finetune.py:976] (0/7) Epoch 26, batch 4400, loss[loss=0.1854, simple_loss=0.2589, pruned_loss=0.05596, over 4824.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2358, pruned_loss=0.04526, over 958223.83 frames. ], batch size: 39, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:05:23,784 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6136, 1.9407, 2.0446, 2.0948, 1.9611, 2.0626, 2.1209, 2.0794],
+ device='cuda:0'), covar=tensor([0.3900, 0.5540, 0.4498, 0.4396, 0.5334, 0.6692, 0.5153, 0.4815],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0375, 0.0329, 0.0340, 0.0351, 0.0395, 0.0360, 0.0331],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 00:05:26,138 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147597.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:05:31,683 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3597, 3.0119, 0.8885, 1.6509, 1.7348, 2.1332, 1.7792, 0.9978],
+ device='cuda:0'), covar=tensor([0.1378, 0.0860, 0.1719, 0.1253, 0.1057, 0.0926, 0.1567, 0.1925],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0238, 0.0136, 0.0121, 0.0132, 0.0153, 0.0118, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 00:05:56,085 INFO [finetune.py:976] (0/7) Epoch 26, batch 4450, loss[loss=0.1739, simple_loss=0.255, pruned_loss=0.04637, over 4820.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2391, pruned_loss=0.04586, over 957927.75 frames. ], batch size: 40, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:06:07,000 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0
+2023-04-28 00:06:17,546 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147659.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:06:21,902 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-28 00:06:22,299 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147666.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:06:29,331 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6309, 1.3209, 4.0609, 3.7989, 3.5544, 3.8160, 3.7535, 3.5506],
+ device='cuda:0'), covar=tensor([0.7067, 0.5846, 0.0914, 0.1520, 0.1277, 0.1704, 0.2192, 0.1534],
+ device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0309, 0.0407, 0.0409, 0.0349, 0.0413, 0.0318, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 00:06:31,475 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.516e+02 1.781e+02 2.208e+02 3.417e+02, threshold=3.563e+02, percent-clipped=1.0
+2023-04-28 00:06:42,975 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2661, 1.5574, 1.4135, 1.8349, 1.6230, 1.7949, 1.4262, 3.4510],
+ device='cuda:0'), covar=tensor([0.0656, 0.0864, 0.0885, 0.1242, 0.0713, 0.0520, 0.0838, 0.0182],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0040, 0.0037, 0.0038, 0.0054],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 00:06:44,114 INFO [finetune.py:976] (0/7) Epoch 26, batch 4500, loss[loss=0.127, simple_loss=0.1987, pruned_loss=0.02766, over 4787.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2404, pruned_loss=0.04663, over 955991.56 frames. ], batch size: 29, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:07:13,549 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.5297, 3.4868, 2.9873, 4.0663, 3.3766, 3.5228, 1.8173, 3.4979],
+ device='cuda:0'), covar=tensor([0.1760, 0.1219, 0.3633, 0.1376, 0.2596, 0.1578, 0.5028, 0.2325],
+ device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0221, 0.0253, 0.0305, 0.0300, 0.0248, 0.0274, 0.0274],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 00:07:17,722 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147720.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:07:25,112 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0
+2023-04-28 00:07:37,055 INFO [finetune.py:976] (0/7) Epoch 26, batch 4550, loss[loss=0.1918, simple_loss=0.2528, pruned_loss=0.0654, over 4243.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2421, pruned_loss=0.04705, over 956655.44 frames. ], batch size: 65, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:08:01,004 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147763.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:08:14,011 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.021e+02 1.503e+02 1.815e+02 2.151e+02 5.912e+02, threshold=3.631e+02, percent-clipped=1.0
+2023-04-28 00:08:31,437 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=147790.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:08:32,580 INFO [finetune.py:976] (0/7) Epoch 26, batch 4600, loss[loss=0.1364, simple_loss=0.207, pruned_loss=0.0329, over 4791.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2416, pruned_loss=0.04694, over 957182.03 frames. ], batch size: 29, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:08:47,315 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147815.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:08:47,935 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5915, 1.8222, 1.6998, 2.1153, 2.0086, 2.2014, 1.7202, 4.4421],
+ device='cuda:0'), covar=tensor([0.0496, 0.0797, 0.0768, 0.1154, 0.0598, 0.0467, 0.0715, 0.0089],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 00:09:06,109 INFO [finetune.py:976] (0/7) Epoch 26, batch 4650, loss[loss=0.1345, simple_loss=0.2039, pruned_loss=0.03252, over 4862.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2383, pruned_loss=0.04606, over 957935.00 frames. ], batch size: 31, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:09:09,885 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3063, 1.6496, 1.5149, 1.8986, 1.7669, 1.8678, 1.5030, 3.5641],
+ device='cuda:0'), covar=tensor([0.0580, 0.0749, 0.0739, 0.1085, 0.0595, 0.0445, 0.0693, 0.0155],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 00:09:15,422 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147857.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:09:25,965 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 7.854e+01 1.470e+02 1.652e+02 1.982e+02 3.804e+02, threshold=3.304e+02, percent-clipped=1.0
+2023-04-28 00:09:29,035 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147876.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:09:33,058 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.2490, 3.6713, 3.2512, 3.5207, 2.8425, 3.2058, 3.2814, 2.8575],
+ device='cuda:0'), covar=tensor([0.1670, 0.1099, 0.0703, 0.0991, 0.2754, 0.0999, 0.1715, 0.2418],
+ device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0296, 0.0214, 0.0275, 0.0312, 0.0251, 0.0247, 0.0261],
+ device='cuda:0'), out_proj_covar=tensor([1.1221e-04, 1.1657e-04, 8.3975e-05, 1.0811e-04, 1.2591e-04, 9.8897e-05,
+ 9.9839e-05, 1.0284e-04], device='cuda:0')
+2023-04-28 00:09:40,051 INFO [finetune.py:976] (0/7) Epoch 26, batch 4700, loss[loss=0.1554, simple_loss=0.2263, pruned_loss=0.04229, over 4904.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2359, pruned_loss=0.04548, over 957259.58 frames. ], batch size: 35, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:09:43,173 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147897.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:09:51,617 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=147911.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:09:55,887 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147918.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:10:07,696 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-28 00:10:12,814 INFO [finetune.py:976] (0/7) Epoch 26, batch 4750, loss[loss=0.1838, simple_loss=0.2551, pruned_loss=0.05626, over 4899.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2334, pruned_loss=0.04479, over 957492.61 frames. ], batch size: 43, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:10:15,197 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=147945.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:10:28,083 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=147966.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:10:31,603 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.017e+02 1.547e+02 1.796e+02 2.219e+02 3.890e+02, threshold=3.592e+02, percent-clipped=1.0
+2023-04-28 00:10:31,716 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=147972.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:10:46,577 INFO [finetune.py:976] (0/7) Epoch 26, batch 4800, loss[loss=0.2101, simple_loss=0.2843, pruned_loss=0.06793, over 4886.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2365, pruned_loss=0.0454, over 956491.99 frames. ], batch size: 32, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:10:52,092 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-148000.pt
+2023-04-28 00:10:56,450 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9377, 1.2322, 5.2880, 4.9447, 4.5045, 5.0859, 4.5651, 4.6031],
+ device='cuda:0'), covar=tensor([0.6986, 0.6664, 0.0884, 0.1719, 0.1085, 0.1596, 0.1324, 0.1623],
+ device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0309, 0.0407, 0.0409, 0.0347, 0.0413, 0.0318, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 00:11:01,949 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=148014.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:11:02,576 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148015.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:11:21,740 INFO [finetune.py:976] (0/7) Epoch 26, batch 4850, loss[loss=0.1815, simple_loss=0.2531, pruned_loss=0.05499, over 4824.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2403, pruned_loss=0.04685, over 954560.39 frames. ], batch size: 38, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:11:23,137 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1486, 1.8587, 2.3296, 2.5902, 2.1500, 2.0511, 2.1867, 2.1598],
+ device='cuda:0'), covar=tensor([0.4454, 0.7272, 0.6554, 0.5409, 0.6094, 0.8727, 0.8398, 1.0026],
+ device='cuda:0'), in_proj_covar=tensor([0.0440, 0.0421, 0.0514, 0.0506, 0.0468, 0.0504, 0.0505, 0.0518],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 00:11:44,622 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148063.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:11:55,270 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.111e+02 1.622e+02 1.940e+02 2.340e+02 4.108e+02, threshold=3.881e+02, percent-clipped=3.0
+2023-04-28 00:12:07,477 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9025, 2.1940, 1.9498, 2.2054, 1.6165, 1.9258, 1.7604, 1.4003],
+ device='cuda:0'), covar=tensor([0.1670, 0.0995, 0.0702, 0.0932, 0.3092, 0.0991, 0.1745, 0.2273],
+ device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0297, 0.0214, 0.0275, 0.0312, 0.0252, 0.0247, 0.0261],
+ device='cuda:0'), out_proj_covar=tensor([1.1246e-04, 1.1676e-04, 8.4054e-05, 1.0807e-04, 1.2590e-04, 9.9131e-05,
+ 9.9751e-05, 1.0287e-04], device='cuda:0')
+2023-04-28 00:12:18,060 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148090.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:12:24,902 INFO [finetune.py:976] (0/7) Epoch 26, batch 4900, loss[loss=0.2209, simple_loss=0.2795, pruned_loss=0.08115, over 4784.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2419, pruned_loss=0.04693, over 955271.17 frames. ], batch size: 29, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:12:47,898 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=148111.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:12:49,799 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2385, 1.2584, 1.3246, 1.6021, 1.6599, 1.2936, 0.9640, 1.4404],
+ device='cuda:0'), covar=tensor([0.0923, 0.1461, 0.1075, 0.0685, 0.0677, 0.0945, 0.0929, 0.0657],
+ device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0202, 0.0185, 0.0171, 0.0178, 0.0178, 0.0152, 0.0179],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 00:13:22,192 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=148138.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:13:22,901 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0830, 1.8977, 2.3422, 2.2568, 2.0492, 1.9976, 2.1524, 2.2003],
+ device='cuda:0'), covar=tensor([0.6040, 0.8475, 0.9152, 0.8231, 0.7721, 1.1201, 1.0627, 1.1107],
+ device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0423, 0.0516, 0.0507, 0.0470, 0.0506, 0.0507, 0.0520],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 00:13:30,216 INFO [finetune.py:976] (0/7) Epoch 26, batch 4950, loss[loss=0.1493, simple_loss=0.2283, pruned_loss=0.03517, over 4747.00 frames. ], tot_loss[loss=0.1694, simple_loss=0.2435, pruned_loss=0.04766, over 954705.03 frames. ], batch size: 54, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:13:41,362 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.41 vs. limit=5.0
+2023-04-28 00:13:52,200 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148171.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:13:52,737 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.839e+01 1.452e+02 1.801e+02 2.197e+02 4.892e+02, threshold=3.601e+02, percent-clipped=1.0
+2023-04-28 00:13:54,716 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3810, 1.5906, 1.8712, 1.9370, 1.8397, 1.8768, 1.8610, 1.8660],
+ device='cuda:0'), covar=tensor([0.3954, 0.5481, 0.4164, 0.4166, 0.5582, 0.6850, 0.5077, 0.4728],
+ device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0375, 0.0329, 0.0340, 0.0351, 0.0396, 0.0361, 0.0332],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 00:13:58,372 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7494, 1.3392, 1.4415, 1.4731, 1.9086, 1.5537, 1.3202, 1.3670],
+ device='cuda:0'), covar=tensor([0.1620, 0.1493, 0.1948, 0.1444, 0.0929, 0.1636, 0.1864, 0.2338],
+ device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0311, 0.0353, 0.0288, 0.0329, 0.0306, 0.0300, 0.0377],
+ device='cuda:0'), out_proj_covar=tensor([6.4437e-05, 6.4037e-05, 7.4156e-05, 5.7601e-05, 6.7317e-05, 6.3950e-05,
+ 6.2073e-05, 7.9980e-05], device='cuda:0')
+2023-04-28 00:14:06,561 INFO [finetune.py:976] (0/7) Epoch 26, batch 5000, loss[loss=0.1502, simple_loss=0.2291, pruned_loss=0.03566, over 4935.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2416, pruned_loss=0.04719, over 954046.82 frames. ], batch size: 38, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:14:21,039 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148213.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:14:39,765 INFO [finetune.py:976] (0/7) Epoch 26, batch 5050, loss[loss=0.1572, simple_loss=0.2288, pruned_loss=0.04278, over 4895.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2394, pruned_loss=0.04659, over 955617.76 frames. ], batch size: 32, lr: 2.95e-03, grad_scale: 32.0
+2023-04-28 00:14:40,542 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7466, 1.1820, 1.7967, 2.2690, 1.7844, 1.6957, 1.7251, 1.6874],
+ device='cuda:0'), covar=tensor([0.4099, 0.6383, 0.5465, 0.4638, 0.5320, 0.6887, 0.6909, 0.8027],
+ device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0423, 0.0515, 0.0506, 0.0470, 0.0506, 0.0506, 0.0519],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 00:14:56,590 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148267.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:14:59,522 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.057e+01 1.422e+02 1.750e+02 2.059e+02 5.482e+02, threshold=3.501e+02, percent-clipped=2.0
+2023-04-28 00:15:08,791 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148287.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:15:11,732 INFO [finetune.py:976] (0/7) Epoch 26, batch 5100, loss[loss=0.174, simple_loss=0.2384, pruned_loss=0.05477, over 4860.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.236, pruned_loss=0.04557, over 954645.01 frames. ], batch size: 31, lr: 2.95e-03, grad_scale: 64.0
+2023-04-28 00:15:21,530 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1526, 1.4857, 1.3528, 1.7127, 1.5684, 1.7051, 1.3815, 3.0780],
+ device='cuda:0'), covar=tensor([0.0642, 0.0810, 0.0836, 0.1190, 0.0636, 0.0481, 0.0744, 0.0156],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0014, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 00:15:22,139 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6544, 3.5115, 0.9072, 1.9027, 2.0910, 2.3719, 1.9817, 1.1182],
+ device='cuda:0'), covar=tensor([0.1294, 0.0860, 0.2021, 0.1251, 0.0946, 0.1033, 0.1590, 0.1889],
+ device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0235, 0.0134, 0.0120, 0.0130, 0.0150, 0.0116, 0.0117],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 00:15:28,678 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148315.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:15:45,037 INFO [finetune.py:976] (0/7) Epoch 26, batch 5150, loss[loss=0.1731, simple_loss=0.2536, pruned_loss=0.04625, over 4862.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.237, pruned_loss=0.04638, over 953834.85 frames. ], batch size: 34, lr: 2.95e-03, grad_scale: 64.0
+2023-04-28 00:15:49,315 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148348.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:15:56,082 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8292, 1.7322, 1.9783, 2.2778, 2.4274, 1.7949, 1.6198, 2.0128],
+ device='cuda:0'), covar=tensor([0.0805, 0.1040, 0.0691, 0.0533, 0.0550, 0.0836, 0.0682, 0.0552],
+ device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0201, 0.0183, 0.0170, 0.0176, 0.0176, 0.0151, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 00:16:00,238 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=148363.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:16:06,182 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.267e+01 1.646e+02 1.916e+02 2.227e+02 4.429e+02, threshold=3.832e+02, percent-clipped=2.0
+2023-04-28 00:16:18,409 INFO [finetune.py:976] (0/7) Epoch 26, batch 5200, loss[loss=0.1568, simple_loss=0.2202, pruned_loss=0.04666, over 4760.00 frames. ], tot_loss[loss=0.168, simple_loss=0.241, pruned_loss=0.04753, over 954507.11 frames. ], batch size: 26, lr: 2.95e-03, grad_scale: 64.0
+2023-04-28 00:16:23,333 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-28 00:16:44,043 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148429.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:16:51,852 INFO [finetune.py:976] (0/7) Epoch 26, batch 5250, loss[loss=0.1814, simple_loss=0.2613, pruned_loss=0.05081, over 4897.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2428, pruned_loss=0.04764, over 954881.01 frames. ], batch size: 35, lr: 2.94e-03, grad_scale: 64.0
+2023-04-28 00:17:12,104 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148471.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:17:12,616 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.069e+02 1.667e+02 1.923e+02 2.369e+02 3.667e+02, threshold=3.847e+02, percent-clipped=0.0
+2023-04-28 00:17:16,979 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8482, 4.1680, 0.6862, 2.0492, 2.3324, 2.7588, 2.4245, 0.9637],
+ device='cuda:0'), covar=tensor([0.1301, 0.1032, 0.2135, 0.1287, 0.0956, 0.1045, 0.1356, 0.2188],
+ device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0235, 0.0134, 0.0120, 0.0130, 0.0150, 0.0116, 0.0117],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 00:17:23,728 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148490.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:17:24,853 INFO [finetune.py:976] (0/7) Epoch 26, batch 5300, loss[loss=0.1896, simple_loss=0.272, pruned_loss=0.05359, over 4781.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2438, pruned_loss=0.04779, over 953423.03 frames. ], batch size: 51, lr: 2.94e-03, grad_scale: 64.0
+2023-04-28 00:17:28,531 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148498.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:17:31,808 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-04-28 00:17:47,182 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148513.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:17:53,157 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-28 00:17:56,052 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=148519.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:18:24,823 INFO [finetune.py:976] (0/7) Epoch 26, batch 5350, loss[loss=0.1827, simple_loss=0.253, pruned_loss=0.0562, over 4903.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2424, pruned_loss=0.04721, over 953807.04 frames. ], batch size: 37, lr: 2.94e-03, grad_scale: 64.0
+2023-04-28 00:18:36,122 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-28 00:18:46,004 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148559.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:18:47,172 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=148561.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:18:56,334 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148567.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:18:59,258 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.079e+02 1.519e+02 1.727e+02 2.044e+02 3.466e+02, threshold=3.453e+02, percent-clipped=0.0
+2023-04-28 00:19:22,272 INFO [finetune.py:976] (0/7) Epoch 26, batch 5400, loss[loss=0.1634, simple_loss=0.242, pruned_loss=0.04233, over 4779.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2401, pruned_loss=0.04661, over 955552.45 frames. ], batch size: 29, lr: 2.94e-03, grad_scale: 64.0
+2023-04-28 00:19:33,056 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.87 vs. limit=2.0
+2023-04-28 00:19:53,634 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=148615.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:20:04,629 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.58 vs. limit=2.0
+2023-04-28 00:20:05,815 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.70 vs. limit=5.0
+2023-04-28 00:20:11,714 INFO [finetune.py:976] (0/7) Epoch 26, batch 5450, loss[loss=0.1726, simple_loss=0.2458, pruned_loss=0.04968, over 4910.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2372, pruned_loss=0.04584, over 955201.16 frames. ], batch size: 37, lr: 2.94e-03, grad_scale: 64.0
+2023-04-28 00:20:12,386 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148643.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:20:16,108 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9118, 2.2419, 2.1450, 2.2736, 2.1596, 2.1663, 2.2125, 2.1267],
+ device='cuda:0'), covar=tensor([0.3690, 0.5983, 0.4720, 0.3997, 0.5424, 0.6615, 0.6357, 0.5674],
+ device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0373, 0.0328, 0.0338, 0.0349, 0.0394, 0.0360, 0.0332],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 00:20:17,254 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148651.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:20:31,294 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.574e+02 1.926e+02 2.349e+02 4.613e+02, threshold=3.853e+02, percent-clipped=6.0
+2023-04-28 00:20:45,353 INFO [finetune.py:976] (0/7) Epoch 26, batch 5500, loss[loss=0.1351, simple_loss=0.2069, pruned_loss=0.03168, over 4771.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2342, pruned_loss=0.04489, over 954921.56 frames. ], batch size: 26, lr: 2.94e-03, grad_scale: 64.0
+2023-04-28 00:20:57,700 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148712.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:21:00,138 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=148716.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:21:18,755 INFO [finetune.py:976] (0/7) Epoch 26, batch 5550, loss[loss=0.1665, simple_loss=0.2549, pruned_loss=0.03907, over 4899.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2357, pruned_loss=0.04498, over 955962.04 frames. ], batch size: 43, lr: 2.94e-03, grad_scale: 64.0
+2023-04-28 00:21:38,044 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.670e+01 1.487e+02 1.767e+02 2.141e+02 3.989e+02, threshold=3.535e+02, percent-clipped=1.0
+2023-04-28 00:21:41,042 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=148777.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:21:45,565 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148785.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:21:49,594 INFO [finetune.py:976] (0/7) Epoch 26, batch 5600, loss[loss=0.1694, simple_loss=0.2339, pruned_loss=0.05239, over 4776.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2396, pruned_loss=0.04627, over 956225.28 frames. ], batch size: 26, lr: 2.94e-03, grad_scale: 64.0
+2023-04-28 00:22:16,096 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0777, 1.3547, 1.2214, 1.6480, 1.4670, 1.4014, 1.3841, 2.4378],
+ device='cuda:0'), covar=tensor([0.0611, 0.0841, 0.0822, 0.1174, 0.0627, 0.0505, 0.0738, 0.0202],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0014, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 00:22:20,663 INFO [finetune.py:976] (0/7) Epoch 26, batch 5650, loss[loss=0.1793, simple_loss=0.2583, pruned_loss=0.0501, over 4858.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2415, pruned_loss=0.04628, over 955330.86 frames. ], batch size: 31, lr: 2.94e-03, grad_scale: 32.0
+2023-04-28 00:22:23,057 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2068, 2.9070, 1.0181, 1.6736, 2.4240, 1.1537, 3.9657, 1.8877],
+ device='cuda:0'), covar=tensor([0.0772, 0.0782, 0.0878, 0.1256, 0.0508, 0.1148, 0.0259, 0.0672],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0050, 0.0052, 0.0074, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-28 00:22:27,778 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=148854.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:22:32,626 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8142, 1.4044, 1.5930, 1.5379, 1.9709, 1.6615, 1.3913, 1.5348],
+ device='cuda:0'), covar=tensor([0.1791, 0.1448, 0.1711, 0.1354, 0.0944, 0.1425, 0.2012, 0.2318],
+ device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0312, 0.0356, 0.0291, 0.0330, 0.0309, 0.0303, 0.0379],
+ device='cuda:0'), out_proj_covar=tensor([6.5133e-05, 6.4224e-05, 7.4781e-05, 5.8210e-05, 6.7588e-05, 6.4503e-05,
+ 6.2761e-05, 8.0304e-05], device='cuda:0')
+2023-04-28 00:22:39,032 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.125e+02 1.517e+02 1.815e+02 2.254e+02 4.751e+02, threshold=3.630e+02, percent-clipped=7.0
+2023-04-28 00:22:46,195 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4266, 4.5744, 1.1548, 2.5362, 2.6960, 3.1744, 2.8248, 1.2741],
+ device='cuda:0'), covar=tensor([0.1144, 0.0788, 0.1980, 0.1164, 0.0925, 0.0993, 0.1326, 0.2013],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0237, 0.0135, 0.0121, 0.0131, 0.0151, 0.0117, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 00:22:50,264 INFO [finetune.py:976] (0/7) Epoch 26, batch 5700, loss[loss=0.1437, simple_loss=0.2086, pruned_loss=0.03944, over 4260.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2377, pruned_loss=0.04582, over 933544.49 frames. ], batch size: 18, lr: 2.94e-03, grad_scale: 32.0
+2023-04-28 00:23:07,651 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-26.pt
+2023-04-28 00:23:20,155 INFO [finetune.py:976] (0/7) Epoch 27, batch 0, loss[loss=0.1474, simple_loss=0.2397, pruned_loss=0.02755, over 4792.00 frames. ], tot_loss[loss=0.1474, simple_loss=0.2397, pruned_loss=0.02755, over 4792.00 frames. ], batch size: 29, lr: 2.94e-03, grad_scale: 32.0
+2023-04-28 00:23:20,156 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-28 00:23:31,740 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0997, 2.5241, 1.1046, 1.4120, 2.0542, 1.2534, 3.0292, 1.7550],
+ device='cuda:0'), covar=tensor([0.0661, 0.0551, 0.0696, 0.1255, 0.0411, 0.0935, 0.0235, 0.0582],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0064, 0.0047, 0.0046, 0.0050, 0.0051, 0.0073, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-28 00:23:32,214 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3686, 1.2793, 1.6059, 1.6111, 1.2881, 1.2189, 1.3175, 0.8213],
+ device='cuda:0'), covar=tensor([0.0526, 0.0610, 0.0417, 0.0461, 0.0729, 0.1015, 0.0525, 0.0555],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0068, 0.0074, 0.0094, 0.0072, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 00:23:41,725 INFO [finetune.py:1010] (0/7) Epoch 27, validation: loss=0.1548, simple_loss=0.2237, pruned_loss=0.04298, over 2265189.00 frames.
+2023-04-28 00:23:41,725 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-28 00:24:11,154 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=148943.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:24:43,144 INFO [finetune.py:976] (0/7) Epoch 27, batch 50, loss[loss=0.1409, simple_loss=0.2123, pruned_loss=0.0348, over 4723.00 frames. ], tot_loss[loss=0.1699, simple_loss=0.2422, pruned_loss=0.0488, over 215645.62 frames. ], batch size: 54, lr: 2.94e-03, grad_scale: 32.0
+2023-04-28 00:24:45,491 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.880e+01 1.482e+02 1.782e+02 2.132e+02 4.593e+02, threshold=3.564e+02, percent-clipped=2.0
+2023-04-28 00:24:52,715 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=2.01 vs. limit=2.0
+2023-04-28 00:25:07,486 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=148991.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:25:28,967 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149007.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:25:37,829 INFO [finetune.py:976] (0/7) Epoch 27, batch 100, loss[loss=0.177, simple_loss=0.2474, pruned_loss=0.05328, over 4785.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.239, pruned_loss=0.04733, over 380600.88 frames. ], batch size: 51, lr: 2.94e-03, grad_scale: 32.0
+2023-04-28 00:26:11,901 INFO [finetune.py:976] (0/7) Epoch 27, batch 150, loss[loss=0.1513, simple_loss=0.2257, pruned_loss=0.03844, over 4764.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2325, pruned_loss=0.04541, over 508541.44 frames. ], batch size: 28, lr: 2.94e-03, grad_scale: 32.0
+2023-04-28 00:26:13,177 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149072.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:26:13,726 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.439e+02 1.708e+02 1.990e+02 3.271e+02, threshold=3.417e+02, percent-clipped=0.0
+2023-04-28 00:26:22,651 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149085.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:26:30,174 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8495, 1.3756, 1.5633, 1.5028, 1.9777, 1.6987, 1.4449, 1.4997],
+ device='cuda:0'), covar=tensor([0.1862, 0.2039, 0.2366, 0.1808, 0.1171, 0.1747, 0.2015, 0.2541],
+ device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0309, 0.0352, 0.0288, 0.0327, 0.0307, 0.0301, 0.0376],
+ device='cuda:0'), out_proj_covar=tensor([6.4641e-05, 6.3579e-05, 7.3838e-05, 5.7717e-05, 6.6979e-05, 6.4048e-05,
+ 6.2243e-05, 7.9547e-05], device='cuda:0')
+2023-04-28 00:26:45,180 INFO [finetune.py:976] (0/7) Epoch 27, batch 200, loss[loss=0.1688, simple_loss=0.2385, pruned_loss=0.04952, over 3993.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2314, pruned_loss=0.04473, over 607190.63 frames. ], batch size: 65, lr: 2.94e-03, grad_scale: 32.0
+2023-04-28 00:26:55,605 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=149133.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:27:08,575 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149154.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:27:18,791 INFO [finetune.py:976] (0/7) Epoch 27, batch 250, loss[loss=0.1503, simple_loss=0.2267, pruned_loss=0.0369, over 4763.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2361, pruned_loss=0.04621, over 683753.51 frames. ], batch size: 28, lr: 2.94e-03, grad_scale: 32.0
+2023-04-28 00:27:21,649 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.484e+02 1.804e+02 2.162e+02 4.404e+02, threshold=3.609e+02, percent-clipped=2.0
+2023-04-28 00:27:40,607 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.51 vs. limit=5.0
+2023-04-28 00:27:41,042 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=149202.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 00:27:52,339 INFO [finetune.py:976] (0/7) Epoch 27, batch 300, loss[loss=0.1045, simple_loss=0.1792, pruned_loss=0.01493, over 4820.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2397, pruned_loss=0.0471, over 745256.22 frames.
], batch size: 25, lr: 2.94e-03, grad_scale: 32.0 +2023-04-28 00:28:12,506 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6154, 1.5576, 1.8739, 1.9592, 1.5337, 1.3264, 1.6783, 1.0506], + device='cuda:0'), covar=tensor([0.0637, 0.0619, 0.0432, 0.0642, 0.0644, 0.0850, 0.0604, 0.0599], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0069, 0.0074, 0.0094, 0.0072, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 00:28:13,717 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1673, 1.3106, 1.2138, 1.6021, 1.4023, 1.4816, 1.3384, 2.4952], + device='cuda:0'), covar=tensor([0.0544, 0.0840, 0.0801, 0.1133, 0.0668, 0.0478, 0.0732, 0.0188], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0042, 0.0040, 0.0037, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 00:28:22,258 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0877, 1.2834, 5.2480, 4.9255, 4.5085, 5.0663, 4.5812, 4.6443], + device='cuda:0'), covar=tensor([0.7189, 0.6513, 0.1101, 0.1871, 0.1047, 0.1654, 0.1337, 0.1541], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0312, 0.0413, 0.0413, 0.0350, 0.0416, 0.0321, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 00:28:25,826 INFO [finetune.py:976] (0/7) Epoch 27, batch 350, loss[loss=0.1619, simple_loss=0.2428, pruned_loss=0.0405, over 4810.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2426, pruned_loss=0.04796, over 792609.56 frames. ], batch size: 25, lr: 2.94e-03, grad_scale: 32.0 +2023-04-28 00:28:28,122 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.225e+02 1.612e+02 1.887e+02 2.251e+02 5.949e+02, threshold=3.774e+02, percent-clipped=2.0 +2023-04-28 00:28:41,702 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3773, 1.5672, 1.8425, 1.9009, 1.8112, 1.9312, 1.9138, 1.8917], + device='cuda:0'), covar=tensor([0.3551, 0.5100, 0.4016, 0.4078, 0.5056, 0.6338, 0.4458, 0.4416], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0374, 0.0329, 0.0339, 0.0350, 0.0394, 0.0359, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 00:28:51,971 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149307.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:28:59,851 INFO [finetune.py:976] (0/7) Epoch 27, batch 400, loss[loss=0.1604, simple_loss=0.2274, pruned_loss=0.04667, over 4866.00 frames. ], tot_loss[loss=0.1703, simple_loss=0.2439, pruned_loss=0.04833, over 829453.90 frames. ], batch size: 31, lr: 2.94e-03, grad_scale: 32.0 +2023-04-28 00:29:07,756 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8607, 1.5041, 1.5592, 1.6696, 2.0298, 1.6821, 1.4626, 1.3862], + device='cuda:0'), covar=tensor([0.1876, 0.1429, 0.1945, 0.1323, 0.0918, 0.1542, 0.2023, 0.2385], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0308, 0.0351, 0.0287, 0.0326, 0.0306, 0.0299, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.4305e-05, 6.3354e-05, 7.3556e-05, 5.7557e-05, 6.6794e-05, 6.3813e-05, + 6.1891e-05, 7.9155e-05], device='cuda:0') +2023-04-28 00:29:08,523 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.82 vs. 
limit=5.0 +2023-04-28 00:29:39,541 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=149355.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:29:42,715 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5210, 1.9931, 1.9518, 2.0449, 1.9367, 2.0165, 2.0128, 1.9759], + device='cuda:0'), covar=tensor([0.4012, 0.5467, 0.4661, 0.4132, 0.5818, 0.6279, 0.5212, 0.4928], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0376, 0.0331, 0.0340, 0.0352, 0.0396, 0.0360, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 00:29:48,687 INFO [finetune.py:976] (0/7) Epoch 27, batch 450, loss[loss=0.1684, simple_loss=0.2357, pruned_loss=0.05051, over 4814.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2426, pruned_loss=0.04743, over 858138.53 frames. ], batch size: 40, lr: 2.94e-03, grad_scale: 32.0 +2023-04-28 00:29:50,015 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=149372.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:29:50,984 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.462e+01 1.496e+02 1.752e+02 2.050e+02 4.938e+02, threshold=3.505e+02, percent-clipped=1.0 +2023-04-28 00:30:29,034 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0 +2023-04-28 00:30:41,613 INFO [finetune.py:976] (0/7) Epoch 27, batch 500, loss[loss=0.1531, simple_loss=0.2224, pruned_loss=0.04191, over 4861.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2398, pruned_loss=0.04658, over 877786.78 frames. ], batch size: 44, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:30:41,678 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=149420.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:31:43,468 INFO [finetune.py:976] (0/7) Epoch 27, batch 550, loss[loss=0.1498, simple_loss=0.2232, pruned_loss=0.03814, over 4808.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.237, pruned_loss=0.04604, over 894003.81 frames. ], batch size: 51, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:31:43,598 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6868, 1.3628, 1.3566, 1.3669, 1.8576, 1.4951, 1.2909, 1.2917], + device='cuda:0'), covar=tensor([0.1433, 0.1227, 0.1786, 0.1355, 0.0819, 0.1373, 0.1521, 0.2252], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0308, 0.0351, 0.0287, 0.0326, 0.0305, 0.0299, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.4106e-05, 6.3409e-05, 7.3787e-05, 5.7552e-05, 6.6700e-05, 6.3745e-05, + 6.1869e-05, 7.9136e-05], device='cuda:0') +2023-04-28 00:31:45,296 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.482e+02 1.685e+02 2.089e+02 3.058e+02, threshold=3.371e+02, percent-clipped=0.0 +2023-04-28 00:31:59,873 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0 +2023-04-28 00:32:13,117 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149514.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:32:16,577 INFO [finetune.py:976] (0/7) Epoch 27, batch 600, loss[loss=0.1725, simple_loss=0.2484, pruned_loss=0.04834, over 4932.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2373, pruned_loss=0.04612, over 909298.17 frames. 
], batch size: 33, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:32:24,508 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2560, 1.7318, 2.0759, 2.2430, 2.0611, 1.7512, 1.1989, 1.7790], + device='cuda:0'), covar=tensor([0.3306, 0.3166, 0.1728, 0.2088, 0.2496, 0.2640, 0.4091, 0.1912], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0246, 0.0228, 0.0314, 0.0221, 0.0234, 0.0228, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 00:32:47,527 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149565.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:32:50,408 INFO [finetune.py:976] (0/7) Epoch 27, batch 650, loss[loss=0.2203, simple_loss=0.2829, pruned_loss=0.07886, over 4873.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2412, pruned_loss=0.04737, over 921317.11 frames. ], batch size: 34, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:32:52,255 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.202e+02 1.546e+02 1.916e+02 2.340e+02 4.465e+02, threshold=3.832e+02, percent-clipped=5.0 +2023-04-28 00:32:53,617 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149575.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:33:23,678 INFO [finetune.py:976] (0/7) Epoch 27, batch 700, loss[loss=0.1449, simple_loss=0.2223, pruned_loss=0.0337, over 4812.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2407, pruned_loss=0.04681, over 927481.99 frames. ], batch size: 25, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:33:27,480 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149626.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 00:33:39,472 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3352, 2.9920, 0.8461, 1.5489, 1.6205, 2.0648, 1.6865, 0.9635], + device='cuda:0'), covar=tensor([0.1423, 0.0888, 0.1893, 0.1292, 0.1108, 0.1078, 0.1621, 0.1859], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0239, 0.0136, 0.0122, 0.0132, 0.0153, 0.0118, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 00:33:56,620 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. limit=2.0 +2023-04-28 00:33:56,991 INFO [finetune.py:976] (0/7) Epoch 27, batch 750, loss[loss=0.1604, simple_loss=0.2202, pruned_loss=0.05032, over 4810.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2427, pruned_loss=0.04756, over 933415.87 frames. 
], batch size: 25, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:33:58,787 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.029e+02 1.603e+02 1.862e+02 2.208e+02 4.099e+02, threshold=3.723e+02, percent-clipped=2.0 +2023-04-28 00:34:03,119 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4103, 1.5485, 5.5069, 5.1657, 4.7624, 5.2794, 4.8563, 4.9115], + device='cuda:0'), covar=tensor([0.6170, 0.6011, 0.0830, 0.1523, 0.0871, 0.1485, 0.1071, 0.1334], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0309, 0.0409, 0.0409, 0.0347, 0.0413, 0.0320, 0.0363], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 00:34:15,758 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0036, 2.5718, 1.1501, 1.4101, 2.0651, 1.1997, 3.3378, 1.7412], + device='cuda:0'), covar=tensor([0.0676, 0.0626, 0.0720, 0.1117, 0.0466, 0.0957, 0.0187, 0.0571], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 00:34:25,970 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-28 00:34:42,992 INFO [finetune.py:976] (0/7) Epoch 27, batch 800, loss[loss=0.1972, simple_loss=0.2635, pruned_loss=0.0654, over 4788.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2427, pruned_loss=0.04749, over 937869.08 frames. ], batch size: 26, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:35:14,870 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=149746.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 00:35:48,631 INFO [finetune.py:976] (0/7) Epoch 27, batch 850, loss[loss=0.162, simple_loss=0.2336, pruned_loss=0.04518, over 4846.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2409, pruned_loss=0.04678, over 942419.49 frames. ], batch size: 47, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:35:55,544 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.475e+02 1.749e+02 2.076e+02 4.110e+02, threshold=3.498e+02, percent-clipped=2.0 +2023-04-28 00:36:23,295 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6496, 1.2434, 4.4001, 3.8520, 3.9320, 4.0495, 3.9818, 3.6318], + device='cuda:0'), covar=tensor([0.9101, 0.8273, 0.1576, 0.2896, 0.1828, 0.2954, 0.2351, 0.2745], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0310, 0.0411, 0.0410, 0.0349, 0.0416, 0.0321, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 00:36:24,410 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=149807.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 00:36:33,260 INFO [finetune.py:976] (0/7) Epoch 27, batch 900, loss[loss=0.1773, simple_loss=0.239, pruned_loss=0.0578, over 4218.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2378, pruned_loss=0.04555, over 944337.68 frames. ], batch size: 65, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:36:41,327 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.61 vs. 
limit=2.0 +2023-04-28 00:36:42,483 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2830, 1.4768, 1.2896, 1.4326, 1.1879, 1.2314, 1.3628, 1.1508], + device='cuda:0'), covar=tensor([0.1795, 0.1380, 0.1039, 0.1300, 0.3640, 0.1257, 0.1622, 0.1852], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0300, 0.0215, 0.0277, 0.0314, 0.0254, 0.0249, 0.0264], + device='cuda:0'), out_proj_covar=tensor([1.1313e-04, 1.1801e-04, 8.4497e-05, 1.0898e-04, 1.2667e-04, 9.9830e-05, + 1.0046e-04, 1.0414e-04], device='cuda:0') +2023-04-28 00:37:06,214 INFO [finetune.py:976] (0/7) Epoch 27, batch 950, loss[loss=0.1948, simple_loss=0.2664, pruned_loss=0.06156, over 4864.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2363, pruned_loss=0.04575, over 946520.95 frames. ], batch size: 44, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:37:06,275 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149870.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:37:08,540 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.083e+02 1.514e+02 1.776e+02 2.110e+02 4.018e+02, threshold=3.552e+02, percent-clipped=2.0 +2023-04-28 00:37:37,865 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.94 vs. limit=5.0 +2023-04-28 00:37:40,040 INFO [finetune.py:976] (0/7) Epoch 27, batch 1000, loss[loss=0.1834, simple_loss=0.2626, pruned_loss=0.05208, over 4819.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2382, pruned_loss=0.04627, over 950709.25 frames. ], batch size: 38, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:37:40,684 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=149921.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 00:37:51,045 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-28 00:38:13,689 INFO [finetune.py:976] (0/7) Epoch 27, batch 1050, loss[loss=0.2262, simple_loss=0.2968, pruned_loss=0.07782, over 4826.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2418, pruned_loss=0.04696, over 953681.16 frames. ], batch size: 33, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:38:15,492 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.212e+02 1.681e+02 2.029e+02 2.486e+02 4.683e+02, threshold=4.057e+02, percent-clipped=3.0 +2023-04-28 00:38:26,706 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-28 00:38:32,657 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-150000.pt +2023-04-28 00:38:43,945 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.98 vs. limit=2.0 +2023-04-28 00:38:48,335 INFO [finetune.py:976] (0/7) Epoch 27, batch 1100, loss[loss=0.1577, simple_loss=0.2366, pruned_loss=0.03943, over 4882.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.2433, pruned_loss=0.0472, over 954132.65 frames. ], batch size: 43, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:39:22,074 INFO [finetune.py:976] (0/7) Epoch 27, batch 1150, loss[loss=0.1618, simple_loss=0.2403, pruned_loss=0.04165, over 4756.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2441, pruned_loss=0.04766, over 956393.00 frames. 
], batch size: 28, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:39:23,884 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.145e+02 1.471e+02 1.817e+02 2.070e+02 3.709e+02, threshold=3.635e+02, percent-clipped=0.0 +2023-04-28 00:39:28,116 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150079.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:39:33,430 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4157, 1.7618, 1.8977, 1.9909, 1.8502, 1.8494, 1.9041, 1.8844], + device='cuda:0'), covar=tensor([0.3960, 0.5506, 0.4238, 0.4274, 0.5681, 0.7291, 0.5086, 0.5001], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0376, 0.0330, 0.0340, 0.0350, 0.0395, 0.0360, 0.0332], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 00:39:35,367 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0 +2023-04-28 00:39:43,681 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150102.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 00:39:48,612 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.74 vs. limit=2.0 +2023-04-28 00:39:54,474 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3496, 1.5573, 1.4281, 1.8559, 1.6937, 2.0253, 1.4669, 3.7030], + device='cuda:0'), covar=tensor([0.0589, 0.0797, 0.0775, 0.1116, 0.0623, 0.0481, 0.0740, 0.0140], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0042, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 00:39:54,487 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1863, 2.5380, 2.0478, 2.4621, 1.7194, 2.0708, 2.2170, 1.7294], + device='cuda:0'), covar=tensor([0.1826, 0.1200, 0.0893, 0.1159, 0.3440, 0.1196, 0.1825, 0.2469], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0301, 0.0215, 0.0277, 0.0314, 0.0254, 0.0249, 0.0264], + device='cuda:0'), out_proj_covar=tensor([1.1284e-04, 1.1831e-04, 8.4537e-05, 1.0909e-04, 1.2644e-04, 9.9745e-05, + 1.0029e-04, 1.0430e-04], device='cuda:0') +2023-04-28 00:39:56,129 INFO [finetune.py:976] (0/7) Epoch 27, batch 1200, loss[loss=0.2042, simple_loss=0.2765, pruned_loss=0.06592, over 4853.00 frames. ], tot_loss[loss=0.1693, simple_loss=0.2435, pruned_loss=0.04754, over 956241.14 frames. 
], batch size: 44, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:40:09,131 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8835, 2.2765, 1.8834, 2.2076, 1.6674, 1.8893, 1.8554, 1.5411], + device='cuda:0'), covar=tensor([0.1793, 0.1206, 0.0847, 0.1093, 0.3304, 0.1107, 0.2096, 0.2438], + device='cuda:0'), in_proj_covar=tensor([0.0284, 0.0301, 0.0215, 0.0278, 0.0315, 0.0254, 0.0249, 0.0265], + device='cuda:0'), out_proj_covar=tensor([1.1327e-04, 1.1864e-04, 8.4752e-05, 1.0937e-04, 1.2676e-04, 9.9996e-05, + 1.0055e-04, 1.0464e-04], device='cuda:0') +2023-04-28 00:40:10,314 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150140.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:40:12,157 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9246, 1.6857, 1.4688, 1.7241, 2.1086, 1.7286, 1.5576, 1.4060], + device='cuda:0'), covar=tensor([0.1560, 0.1277, 0.1759, 0.1185, 0.0845, 0.1654, 0.2066, 0.2400], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0305, 0.0348, 0.0284, 0.0325, 0.0304, 0.0297, 0.0372], + device='cuda:0'), out_proj_covar=tensor([6.3706e-05, 6.2775e-05, 7.2953e-05, 5.6866e-05, 6.6467e-05, 6.3448e-05, + 6.1377e-05, 7.8706e-05], device='cuda:0') +2023-04-28 00:40:45,077 INFO [finetune.py:976] (0/7) Epoch 27, batch 1250, loss[loss=0.1554, simple_loss=0.2278, pruned_loss=0.04153, over 4876.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2411, pruned_loss=0.04681, over 958331.53 frames. ], batch size: 34, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:40:45,188 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150170.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:40:47,939 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.413e+01 1.487e+02 1.765e+02 2.046e+02 3.394e+02, threshold=3.529e+02, percent-clipped=0.0 +2023-04-28 00:41:41,402 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4686, 1.8190, 1.8915, 1.9895, 1.8935, 1.9098, 1.9438, 1.9046], + device='cuda:0'), covar=tensor([0.3965, 0.4966, 0.4260, 0.4214, 0.5375, 0.6700, 0.4915, 0.4418], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0373, 0.0329, 0.0339, 0.0349, 0.0392, 0.0358, 0.0330], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 00:41:49,585 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=150218.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:41:50,798 INFO [finetune.py:976] (0/7) Epoch 27, batch 1300, loss[loss=0.1662, simple_loss=0.2361, pruned_loss=0.04813, over 4675.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2379, pruned_loss=0.04561, over 958270.71 frames. ], batch size: 59, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:41:51,506 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150221.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:42:56,735 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=150269.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:42:57,286 INFO [finetune.py:976] (0/7) Epoch 27, batch 1350, loss[loss=0.1308, simple_loss=0.1979, pruned_loss=0.03189, over 4807.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2381, pruned_loss=0.04575, over 957942.34 frames. 
], batch size: 25, lr: 2.93e-03, grad_scale: 32.0 +2023-04-28 00:42:59,133 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.101e+02 1.474e+02 1.801e+02 2.070e+02 3.416e+02, threshold=3.602e+02, percent-clipped=0.0 +2023-04-28 00:43:05,182 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150274.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:43:18,561 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4734, 1.4429, 1.8041, 1.7559, 1.3217, 1.2027, 1.4190, 0.9088], + device='cuda:0'), covar=tensor([0.0539, 0.0550, 0.0369, 0.0684, 0.0783, 0.1047, 0.0636, 0.0730], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0069, 0.0074, 0.0094, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 00:44:01,099 INFO [finetune.py:976] (0/7) Epoch 27, batch 1400, loss[loss=0.2029, simple_loss=0.2826, pruned_loss=0.06166, over 4758.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2399, pruned_loss=0.04577, over 957394.11 frames. ], batch size: 54, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:44:22,631 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150335.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:45:00,624 INFO [finetune.py:976] (0/7) Epoch 27, batch 1450, loss[loss=0.1436, simple_loss=0.2217, pruned_loss=0.03271, over 4782.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2423, pruned_loss=0.04653, over 953518.82 frames. ], batch size: 29, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:45:03,058 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.594e+02 1.851e+02 2.331e+02 4.105e+02, threshold=3.701e+02, percent-clipped=2.0 +2023-04-28 00:45:04,435 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4786, 1.2191, 0.3252, 1.1800, 1.0397, 1.3428, 1.2813, 1.2678], + device='cuda:0'), covar=tensor([0.0489, 0.0393, 0.0397, 0.0563, 0.0312, 0.0519, 0.0474, 0.0576], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0038, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 00:45:18,588 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5363, 1.3058, 4.1344, 3.8170, 3.6474, 3.9380, 3.8724, 3.6374], + device='cuda:0'), covar=tensor([0.7454, 0.5930, 0.1150, 0.1903, 0.1218, 0.1620, 0.1660, 0.1716], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0311, 0.0413, 0.0411, 0.0350, 0.0417, 0.0322, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 00:45:22,896 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150402.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 00:45:33,782 INFO [finetune.py:976] (0/7) Epoch 27, batch 1500, loss[loss=0.1336, simple_loss=0.2156, pruned_loss=0.02579, over 4004.00 frames. ], tot_loss[loss=0.1706, simple_loss=0.2447, pruned_loss=0.04827, over 951522.19 frames. 
], batch size: 17, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:45:43,906 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150435.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:45:54,540 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=150450.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 00:46:06,954 INFO [finetune.py:976] (0/7) Epoch 27, batch 1550, loss[loss=0.1919, simple_loss=0.2657, pruned_loss=0.05905, over 4805.00 frames. ], tot_loss[loss=0.1707, simple_loss=0.245, pruned_loss=0.04819, over 953191.92 frames. ], batch size: 40, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:46:09,357 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.042e+02 1.486e+02 1.780e+02 2.149e+02 3.630e+02, threshold=3.561e+02, percent-clipped=0.0 +2023-04-28 00:46:40,767 INFO [finetune.py:976] (0/7) Epoch 27, batch 1600, loss[loss=0.1734, simple_loss=0.2389, pruned_loss=0.05397, over 4911.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2434, pruned_loss=0.04808, over 953230.87 frames. ], batch size: 43, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:47:08,155 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150537.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:47:41,254 INFO [finetune.py:976] (0/7) Epoch 27, batch 1650, loss[loss=0.1229, simple_loss=0.1967, pruned_loss=0.02456, over 4813.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2407, pruned_loss=0.0474, over 953969.71 frames. ], batch size: 25, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:47:43,699 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.015e+02 1.594e+02 1.886e+02 2.369e+02 4.479e+02, threshold=3.773e+02, percent-clipped=3.0 +2023-04-28 00:47:50,605 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-28 00:47:54,081 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.59 vs. limit=2.0 +2023-04-28 00:47:54,754 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0 +2023-04-28 00:48:00,526 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150598.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:48:14,790 INFO [finetune.py:976] (0/7) Epoch 27, batch 1700, loss[loss=0.1569, simple_loss=0.2262, pruned_loss=0.04383, over 4824.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2386, pruned_loss=0.04722, over 953367.92 frames. ], batch size: 39, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:48:20,943 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150630.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:48:48,164 INFO [finetune.py:976] (0/7) Epoch 27, batch 1750, loss[loss=0.1988, simple_loss=0.285, pruned_loss=0.05635, over 4798.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2407, pruned_loss=0.04802, over 955565.40 frames. 
], batch size: 45, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:48:50,601 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.161e+02 1.571e+02 1.900e+02 2.319e+02 4.181e+02, threshold=3.799e+02, percent-clipped=4.0 +2023-04-28 00:49:09,530 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9386, 2.4053, 2.1060, 2.3454, 1.6846, 2.0391, 2.1557, 1.5770], + device='cuda:0'), covar=tensor([0.1876, 0.0901, 0.0680, 0.0900, 0.2841, 0.0951, 0.1618, 0.2159], + device='cuda:0'), in_proj_covar=tensor([0.0286, 0.0304, 0.0218, 0.0279, 0.0317, 0.0256, 0.0251, 0.0267], + device='cuda:0'), out_proj_covar=tensor([1.1433e-04, 1.1948e-04, 8.5763e-05, 1.1000e-04, 1.2772e-04, 1.0060e-04, + 1.0112e-04, 1.0531e-04], device='cuda:0') +2023-04-28 00:49:21,912 INFO [finetune.py:976] (0/7) Epoch 27, batch 1800, loss[loss=0.1522, simple_loss=0.2139, pruned_loss=0.04527, over 4720.00 frames. ], tot_loss[loss=0.1702, simple_loss=0.2434, pruned_loss=0.04854, over 956005.96 frames. ], batch size: 23, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:49:29,197 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150732.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:49:31,018 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150735.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:49:54,544 INFO [finetune.py:976] (0/7) Epoch 27, batch 1850, loss[loss=0.1624, simple_loss=0.2422, pruned_loss=0.04136, over 4793.00 frames. ], tot_loss[loss=0.1719, simple_loss=0.2452, pruned_loss=0.04929, over 956088.61 frames. ], batch size: 51, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:50:02,278 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.087e+02 1.541e+02 1.934e+02 2.290e+02 4.009e+02, threshold=3.867e+02, percent-clipped=2.0 +2023-04-28 00:50:13,356 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=150783.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:50:25,736 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150793.0, num_to_drop=1, layers_to_drop={3} +2023-04-28 00:50:45,581 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4826, 1.6929, 1.8452, 1.9331, 1.7631, 1.8085, 1.9647, 1.9270], + device='cuda:0'), covar=tensor([0.3598, 0.5169, 0.4364, 0.4306, 0.5759, 0.7121, 0.4804, 0.4782], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0375, 0.0330, 0.0341, 0.0350, 0.0395, 0.0360, 0.0333], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 00:50:49,660 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8095, 3.4200, 0.7901, 1.9206, 1.9177, 2.3647, 1.9862, 1.0287], + device='cuda:0'), covar=tensor([0.1118, 0.0840, 0.2035, 0.1205, 0.1073, 0.1051, 0.1499, 0.2250], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0238, 0.0136, 0.0121, 0.0131, 0.0152, 0.0118, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 00:50:56,514 INFO [finetune.py:976] (0/7) Epoch 27, batch 1900, loss[loss=0.1773, simple_loss=0.2535, pruned_loss=0.05051, over 4935.00 frames. ], tot_loss[loss=0.1728, simple_loss=0.246, pruned_loss=0.04983, over 953561.01 frames. 
], batch size: 42, lr: 2.93e-03, grad_scale: 16.0 +2023-04-28 00:51:29,796 INFO [finetune.py:976] (0/7) Epoch 27, batch 1950, loss[loss=0.1458, simple_loss=0.2238, pruned_loss=0.03397, over 4759.00 frames. ], tot_loss[loss=0.1708, simple_loss=0.2442, pruned_loss=0.04873, over 955682.87 frames. ], batch size: 28, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:51:32,197 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.093e+02 1.535e+02 1.747e+02 2.093e+02 5.445e+02, threshold=3.494e+02, percent-clipped=3.0 +2023-04-28 00:51:44,424 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=150893.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:51:49,247 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=150900.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:51:52,764 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8201, 1.1960, 1.8400, 2.2931, 1.9103, 1.7789, 1.8157, 1.7731], + device='cuda:0'), covar=tensor([0.4635, 0.7061, 0.6274, 0.5076, 0.6064, 0.7933, 0.7964, 0.8745], + device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0421, 0.0518, 0.0506, 0.0469, 0.0506, 0.0507, 0.0521], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 00:52:03,060 INFO [finetune.py:976] (0/7) Epoch 27, batch 2000, loss[loss=0.1669, simple_loss=0.2284, pruned_loss=0.0527, over 4661.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2411, pruned_loss=0.04792, over 952007.73 frames. ], batch size: 23, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:52:09,165 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=150930.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:52:23,742 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7528, 3.7450, 2.7927, 4.3874, 3.8525, 3.7712, 1.5355, 3.7062], + device='cuda:0'), covar=tensor([0.1758, 0.1189, 0.3168, 0.1517, 0.2748, 0.1785, 0.5642, 0.2257], + device='cuda:0'), in_proj_covar=tensor([0.0242, 0.0217, 0.0249, 0.0299, 0.0295, 0.0245, 0.0272, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 00:52:51,881 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=150961.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:53:02,952 INFO [finetune.py:976] (0/7) Epoch 27, batch 2050, loss[loss=0.1651, simple_loss=0.2332, pruned_loss=0.04853, over 4802.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2378, pruned_loss=0.04694, over 952472.36 frames. ], batch size: 51, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:53:05,398 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.068e+02 1.536e+02 1.853e+02 2.161e+02 4.480e+02, threshold=3.705e+02, percent-clipped=3.0 +2023-04-28 00:53:12,608 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=150978.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:54:07,736 INFO [finetune.py:976] (0/7) Epoch 27, batch 2100, loss[loss=0.1347, simple_loss=0.2028, pruned_loss=0.03327, over 4896.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2375, pruned_loss=0.04665, over 953739.93 frames. ], batch size: 32, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:55:11,731 INFO [finetune.py:976] (0/7) Epoch 27, batch 2150, loss[loss=0.1586, simple_loss=0.2457, pruned_loss=0.03575, over 4748.00 frames. 
], tot_loss[loss=0.1669, simple_loss=0.2401, pruned_loss=0.04685, over 954395.36 frames. ], batch size: 54, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:55:19,341 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.036e+02 1.480e+02 1.779e+02 2.139e+02 3.586e+02, threshold=3.558e+02, percent-clipped=0.0 +2023-04-28 00:55:33,240 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151088.0, num_to_drop=1, layers_to_drop={3} +2023-04-28 00:56:16,159 INFO [finetune.py:976] (0/7) Epoch 27, batch 2200, loss[loss=0.2029, simple_loss=0.2763, pruned_loss=0.06477, over 4194.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2426, pruned_loss=0.04759, over 954068.32 frames. ], batch size: 65, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:57:19,653 INFO [finetune.py:976] (0/7) Epoch 27, batch 2250, loss[loss=0.149, simple_loss=0.2295, pruned_loss=0.03426, over 4761.00 frames. ], tot_loss[loss=0.169, simple_loss=0.2428, pruned_loss=0.0476, over 952308.41 frames. ], batch size: 28, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:57:27,221 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.948e+01 1.668e+02 1.911e+02 2.463e+02 4.700e+02, threshold=3.822e+02, percent-clipped=3.0 +2023-04-28 00:57:38,576 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151184.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:57:49,164 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151193.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:58:22,088 INFO [finetune.py:976] (0/7) Epoch 27, batch 2300, loss[loss=0.1532, simple_loss=0.2169, pruned_loss=0.04469, over 4767.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2425, pruned_loss=0.0474, over 951605.74 frames. ], batch size: 28, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:58:42,717 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=151241.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:58:42,754 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8561, 2.2145, 1.0016, 1.3084, 1.5736, 1.1485, 2.4369, 1.4398], + device='cuda:0'), covar=tensor([0.0630, 0.0560, 0.0584, 0.1018, 0.0406, 0.0884, 0.0249, 0.0573], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 00:58:45,174 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151245.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:58:52,343 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151256.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:59:01,205 INFO [finetune.py:976] (0/7) Epoch 27, batch 2350, loss[loss=0.1313, simple_loss=0.2056, pruned_loss=0.02851, over 4896.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2404, pruned_loss=0.04653, over 952792.08 frames. 
], batch size: 43, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:59:04,084 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.580e+02 1.872e+02 2.240e+02 4.120e+02, threshold=3.744e+02, percent-clipped=1.0 +2023-04-28 00:59:10,173 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151282.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:59:20,473 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151298.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 00:59:34,699 INFO [finetune.py:976] (0/7) Epoch 27, batch 2400, loss[loss=0.1807, simple_loss=0.2478, pruned_loss=0.05673, over 4831.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2377, pruned_loss=0.04606, over 952642.35 frames. ], batch size: 33, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 00:59:44,923 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6909, 3.1147, 1.2180, 1.9349, 2.6992, 1.5982, 4.5913, 2.6485], + device='cuda:0'), covar=tensor([0.0587, 0.0626, 0.0797, 0.1185, 0.0479, 0.1015, 0.0228, 0.0488], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 00:59:50,888 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151343.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 01:00:01,124 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151359.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 01:00:07,713 INFO [finetune.py:976] (0/7) Epoch 27, batch 2450, loss[loss=0.1782, simple_loss=0.2432, pruned_loss=0.05661, over 4923.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2351, pruned_loss=0.04563, over 953033.81 frames. ], batch size: 38, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 01:00:10,590 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.893e+01 1.571e+02 1.897e+02 2.275e+02 8.002e+02, threshold=3.795e+02, percent-clipped=4.0 +2023-04-28 01:00:21,187 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151388.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 01:00:41,810 INFO [finetune.py:976] (0/7) Epoch 27, batch 2500, loss[loss=0.2076, simple_loss=0.2705, pruned_loss=0.07233, over 4859.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2379, pruned_loss=0.04687, over 952778.31 frames. ], batch size: 31, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 01:00:53,952 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=151436.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 01:01:15,577 INFO [finetune.py:976] (0/7) Epoch 27, batch 2550, loss[loss=0.1229, simple_loss=0.204, pruned_loss=0.02093, over 4799.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2413, pruned_loss=0.04758, over 953210.01 frames. ], batch size: 25, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 01:01:17,946 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.064e+02 1.630e+02 1.877e+02 2.163e+02 4.851e+02, threshold=3.753e+02, percent-clipped=1.0 +2023-04-28 01:01:43,334 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.57 vs. limit=5.0 +2023-04-28 01:01:48,925 INFO [finetune.py:976] (0/7) Epoch 27, batch 2600, loss[loss=0.1796, simple_loss=0.2542, pruned_loss=0.0525, over 4865.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2433, pruned_loss=0.0484, over 954900.42 frames. 
], batch size: 31, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 01:01:53,856 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8142, 1.6031, 1.9333, 2.3015, 2.3216, 1.7695, 1.4997, 1.9605], + device='cuda:0'), covar=tensor([0.0863, 0.1216, 0.0733, 0.0489, 0.0600, 0.0855, 0.0751, 0.0634], + device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0200, 0.0183, 0.0169, 0.0177, 0.0177, 0.0150, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 01:02:02,071 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151540.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 01:02:19,360 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151556.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 01:02:33,623 INFO [finetune.py:976] (0/7) Epoch 27, batch 2650, loss[loss=0.1407, simple_loss=0.2296, pruned_loss=0.02592, over 4753.00 frames. ], tot_loss[loss=0.1711, simple_loss=0.245, pruned_loss=0.04854, over 956218.23 frames. ], batch size: 27, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 01:02:41,497 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.080e+02 1.560e+02 1.768e+02 2.112e+02 4.460e+02, threshold=3.536e+02, percent-clipped=1.0 +2023-04-28 01:03:01,892 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5058, 1.3716, 1.7178, 1.7467, 1.2885, 1.2646, 1.4289, 0.8340], + device='cuda:0'), covar=tensor([0.0535, 0.0587, 0.0385, 0.0478, 0.0796, 0.1067, 0.0548, 0.0608], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0069, 0.0074, 0.0094, 0.0072, 0.0063], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 01:03:23,442 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=151604.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 01:03:39,056 INFO [finetune.py:976] (0/7) Epoch 27, batch 2700, loss[loss=0.1646, simple_loss=0.2388, pruned_loss=0.04523, over 4818.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2425, pruned_loss=0.04771, over 955477.00 frames. ], batch size: 39, lr: 2.92e-03, grad_scale: 16.0 +2023-04-28 01:03:46,200 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0888, 1.8492, 2.3686, 2.5199, 2.2016, 2.1022, 2.1897, 2.1251], + device='cuda:0'), covar=tensor([0.4920, 0.7404, 0.7244, 0.5601, 0.6227, 0.8515, 0.9210, 0.9778], + device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0420, 0.0515, 0.0505, 0.0468, 0.0503, 0.0505, 0.0519], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 01:04:05,035 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151638.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 01:04:05,136 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.36 vs. limit=5.0 +2023-04-28 01:04:21,248 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=151654.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 01:04:40,568 INFO [finetune.py:976] (0/7) Epoch 27, batch 2750, loss[loss=0.1638, simple_loss=0.2313, pruned_loss=0.04815, over 4772.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2394, pruned_loss=0.04678, over 956270.07 frames. 
], batch size: 28, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:04:48,521 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.458e+01 1.381e+02 1.737e+02 2.155e+02 3.699e+02, threshold=3.473e+02, percent-clipped=1.0
+2023-04-28 01:05:02,037 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151688.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:05:02,059 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5555, 2.0588, 2.3859, 2.9951, 2.3568, 1.9358, 2.0054, 2.3058],
+ device='cuda:0'), covar=tensor([0.2676, 0.2905, 0.1408, 0.2021, 0.2543, 0.2389, 0.3373, 0.1904],
+ device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0246, 0.0227, 0.0313, 0.0220, 0.0233, 0.0227, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 01:05:44,740 INFO [finetune.py:976] (0/7) Epoch 27, batch 2800, loss[loss=0.1291, simple_loss=0.2035, pruned_loss=0.0273, over 4832.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2367, pruned_loss=0.04579, over 957112.56 frames. ], batch size: 40, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:06:24,876 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151749.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:06:34,603 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5688, 1.9391, 2.2805, 2.8439, 2.3611, 1.8184, 1.7028, 2.4602],
+ device='cuda:0'), covar=tensor([0.3399, 0.3455, 0.1817, 0.2783, 0.2845, 0.2802, 0.4000, 0.1986],
+ device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0246, 0.0227, 0.0313, 0.0221, 0.0233, 0.0227, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 01:06:35,394 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0
+2023-04-28 01:06:53,982 INFO [finetune.py:976] (0/7) Epoch 27, batch 2850, loss[loss=0.148, simple_loss=0.2331, pruned_loss=0.03148, over 4035.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2354, pruned_loss=0.04533, over 955247.90 frames. ], batch size: 65, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:06:56,484 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.506e+02 1.770e+02 2.075e+02 3.794e+02, threshold=3.540e+02, percent-clipped=1.0
+2023-04-28 01:06:57,946 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0
+2023-04-28 01:07:04,910 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8201, 2.3522, 1.8864, 1.7221, 1.3340, 1.3896, 1.9364, 1.3087],
+ device='cuda:0'), covar=tensor([0.1580, 0.1246, 0.1238, 0.1562, 0.2205, 0.1826, 0.0896, 0.2004],
+ device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0208, 0.0169, 0.0204, 0.0200, 0.0186, 0.0155, 0.0187],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-28 01:07:29,479 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=151801.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:07:49,682 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=6.39 vs. limit=5.0
+2023-04-28 01:07:58,282 INFO [finetune.py:976] (0/7) Epoch 27, batch 2900, loss[loss=0.1422, simple_loss=0.2168, pruned_loss=0.0338, over 4706.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2386, pruned_loss=0.0467, over 951887.99 frames. ], batch size: 23, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:08:11,763 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151840.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:08:15,948 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8611, 1.8582, 2.3622, 2.3887, 1.6444, 1.5436, 1.8526, 1.1897],
+ device='cuda:0'), covar=tensor([0.0627, 0.0631, 0.0385, 0.0673, 0.0765, 0.1182, 0.0669, 0.0724],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0069, 0.0074, 0.0094, 0.0072, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:08:15,969 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5684, 2.0002, 2.4728, 3.0434, 2.4220, 1.9355, 2.0584, 2.2977],
+ device='cuda:0'), covar=tensor([0.2957, 0.2882, 0.1459, 0.2221, 0.2597, 0.2400, 0.3338, 0.1939],
+ device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0246, 0.0227, 0.0313, 0.0220, 0.0233, 0.0227, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 01:08:32,190 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=151862.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:08:42,905 INFO [finetune.py:976] (0/7) Epoch 27, batch 2950, loss[loss=0.1148, simple_loss=0.195, pruned_loss=0.0173, over 4795.00 frames. ], tot_loss[loss=0.168, simple_loss=0.242, pruned_loss=0.04704, over 952565.35 frames. ], batch size: 29, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:08:50,555 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.751e+01 1.623e+02 1.878e+02 2.443e+02 4.733e+02, threshold=3.756e+02, percent-clipped=2.0
+2023-04-28 01:09:04,984 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=151888.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:09:47,998 INFO [finetune.py:976] (0/7) Epoch 27, batch 3000, loss[loss=0.14, simple_loss=0.2025, pruned_loss=0.03873, over 4717.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2426, pruned_loss=0.04688, over 952322.26 frames. ], batch size: 23, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:09:48,000 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-28 01:09:50,651 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8006, 1.6413, 1.8361, 2.2265, 2.2476, 1.7572, 1.4245, 1.9631],
+ device='cuda:0'), covar=tensor([0.0804, 0.1172, 0.0783, 0.0471, 0.0540, 0.0902, 0.0726, 0.0502],
+ device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0200, 0.0182, 0.0169, 0.0177, 0.0177, 0.0150, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:10:08,677 INFO [finetune.py:1010] (0/7) Epoch 27, validation: loss=0.1539, simple_loss=0.2224, pruned_loss=0.04268, over 2265189.00 frames.
+2023-04-28 01:10:08,677 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-28 01:10:24,776 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151938.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:10:35,073 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=151954.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:10:45,723 INFO [finetune.py:976] (0/7) Epoch 27, batch 3050, loss[loss=0.1967, simple_loss=0.2705, pruned_loss=0.06142, over 4821.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2426, pruned_loss=0.04671, over 950451.77 frames. ], batch size: 38, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:10:48,112 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.167e+02 1.615e+02 1.912e+02 2.194e+02 5.604e+02, threshold=3.825e+02, percent-clipped=1.0
+2023-04-28 01:10:57,064 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=151986.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:11:06,310 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-152000.pt
+2023-04-28 01:11:08,926 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=152002.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:11:15,881 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5859, 1.4100, 1.9950, 1.8690, 1.4163, 1.3456, 1.5037, 0.9769],
+ device='cuda:0'), covar=tensor([0.0530, 0.0795, 0.0376, 0.0620, 0.0879, 0.1213, 0.0660, 0.0670],
+ device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0067, 0.0065, 0.0069, 0.0074, 0.0094, 0.0072, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:11:20,459 INFO [finetune.py:976] (0/7) Epoch 27, batch 3100, loss[loss=0.1765, simple_loss=0.2379, pruned_loss=0.05759, over 4871.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2409, pruned_loss=0.04646, over 952442.70 frames. ], batch size: 34, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:11:37,590 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152044.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:11:54,338 INFO [finetune.py:976] (0/7) Epoch 27, batch 3150, loss[loss=0.1413, simple_loss=0.2174, pruned_loss=0.03258, over 4765.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2398, pruned_loss=0.04705, over 954729.37 frames. ], batch size: 28, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:11:56,744 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.740e+01 1.539e+02 1.884e+02 2.286e+02 5.253e+02, threshold=3.767e+02, percent-clipped=2.0
+2023-04-28 01:12:27,069 INFO [finetune.py:976] (0/7) Epoch 27, batch 3200, loss[loss=0.1632, simple_loss=0.2309, pruned_loss=0.04773, over 4911.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2372, pruned_loss=0.04605, over 954306.95 frames. ], batch size: 43, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:12:45,371 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5826, 1.1619, 4.4808, 4.1716, 3.8371, 4.2368, 4.1394, 3.9164],
+ device='cuda:0'), covar=tensor([0.7771, 0.7030, 0.1210, 0.1923, 0.1356, 0.2258, 0.1438, 0.1837],
+ device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0310, 0.0413, 0.0410, 0.0351, 0.0418, 0.0321, 0.0367],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:12:52,023 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152157.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:13:00,433 INFO [finetune.py:976] (0/7) Epoch 27, batch 3250, loss[loss=0.1416, simple_loss=0.2172, pruned_loss=0.03294, over 4869.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2362, pruned_loss=0.04576, over 954158.24 frames. ], batch size: 34, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:13:02,810 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.216e+01 1.479e+02 1.800e+02 2.164e+02 4.753e+02, threshold=3.600e+02, percent-clipped=3.0
+2023-04-28 01:13:13,904 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7524, 3.6640, 2.7776, 4.3495, 3.7359, 3.7602, 1.7376, 3.7150],
+ device='cuda:0'), covar=tensor([0.1671, 0.1340, 0.3116, 0.1605, 0.3435, 0.1704, 0.5793, 0.2549],
+ device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0217, 0.0249, 0.0300, 0.0297, 0.0246, 0.0272, 0.0272],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 01:13:33,031 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0194, 1.0927, 1.1968, 1.1737, 1.0485, 0.9533, 1.0119, 0.4914],
+ device='cuda:0'), covar=tensor([0.0554, 0.0598, 0.0483, 0.0470, 0.0728, 0.1160, 0.0484, 0.0638],
+ device='cuda:0'), in_proj_covar=tensor([0.0069, 0.0067, 0.0065, 0.0068, 0.0074, 0.0094, 0.0072, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:13:33,550 INFO [finetune.py:976] (0/7) Epoch 27, batch 3300, loss[loss=0.1715, simple_loss=0.2302, pruned_loss=0.05643, over 4664.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2392, pruned_loss=0.04624, over 955457.17 frames. ], batch size: 23, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:13:36,714 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152225.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:13:59,338 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.7174, 3.7091, 2.7055, 4.2688, 3.6243, 3.6993, 1.7827, 3.6264],
+ device='cuda:0'), covar=tensor([0.1513, 0.1249, 0.3533, 0.1416, 0.2976, 0.1668, 0.5123, 0.2375],
+ device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0218, 0.0251, 0.0302, 0.0298, 0.0247, 0.0274, 0.0273],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 01:14:13,061 INFO [finetune.py:976] (0/7) Epoch 27, batch 3350, loss[loss=0.2129, simple_loss=0.2764, pruned_loss=0.07467, over 4893.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2411, pruned_loss=0.04699, over 952529.31 frames. ], batch size: 43, lr: 2.92e-03, grad_scale: 16.0
+2023-04-28 01:14:14,903 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152273.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:14:15,405 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.492e+02 1.749e+02 2.149e+02 5.486e+02, threshold=3.498e+02, percent-clipped=3.0
+2023-04-28 01:14:34,623 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152286.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:15:17,722 INFO [finetune.py:976] (0/7) Epoch 27, batch 3400, loss[loss=0.1792, simple_loss=0.271, pruned_loss=0.04372, over 4242.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2421, pruned_loss=0.04707, over 953766.04 frames. ], batch size: 65, lr: 2.92e-03, grad_scale: 32.0
+2023-04-28 01:15:36,886 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152334.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:15:47,431 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0
+2023-04-28 01:15:49,700 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152344.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:16:22,884 INFO [finetune.py:976] (0/7) Epoch 27, batch 3450, loss[loss=0.1492, simple_loss=0.2196, pruned_loss=0.03937, over 4736.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2409, pruned_loss=0.04612, over 955388.93 frames. ], batch size: 23, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:16:31,452 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.589e+02 1.871e+02 2.255e+02 4.038e+02, threshold=3.742e+02, percent-clipped=2.0
+2023-04-28 01:16:53,824 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=152392.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:17:28,483 INFO [finetune.py:976] (0/7) Epoch 27, batch 3500, loss[loss=0.1812, simple_loss=0.254, pruned_loss=0.05417, over 4769.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.239, pruned_loss=0.04601, over 956785.62 frames. ], batch size: 26, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:18:00,020 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152445.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:18:10,949 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-28 01:18:18,957 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152457.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:18:23,411 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-28 01:18:32,262 INFO [finetune.py:976] (0/7) Epoch 27, batch 3550, loss[loss=0.1743, simple_loss=0.2413, pruned_loss=0.05371, over 4879.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2368, pruned_loss=0.04551, over 957667.98 frames. ], batch size: 31, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:18:34,720 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.620e+01 1.515e+02 1.760e+02 2.193e+02 3.921e+02, threshold=3.521e+02, percent-clipped=1.0
+2023-04-28 01:18:51,319 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8810, 1.7430, 2.0376, 2.3369, 2.3087, 1.9286, 1.5737, 2.0939],
+ device='cuda:0'), covar=tensor([0.0892, 0.1190, 0.0702, 0.0619, 0.0610, 0.0826, 0.0795, 0.0588],
+ device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0199, 0.0182, 0.0168, 0.0176, 0.0177, 0.0150, 0.0176],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:19:25,032 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=152505.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:19:25,757 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152506.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:19:39,436 INFO [finetune.py:976] (0/7) Epoch 27, batch 3600, loss[loss=0.2009, simple_loss=0.2648, pruned_loss=0.0685, over 4212.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2352, pruned_loss=0.04571, over 954857.48 frames. ], batch size: 65, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:20:44,730 INFO [finetune.py:976] (0/7) Epoch 27, batch 3650, loss[loss=0.1566, simple_loss=0.2385, pruned_loss=0.03736, over 4857.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2367, pruned_loss=0.04601, over 955289.41 frames. ], batch size: 44, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:20:51,634 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.035e+02 1.532e+02 1.875e+02 2.206e+02 4.612e+02, threshold=3.749e+02, percent-clipped=4.0
+2023-04-28 01:20:56,030 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152581.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:21:02,377 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152582.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:21:06,626 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7192, 2.2628, 1.7349, 1.6181, 1.2649, 1.3033, 1.7703, 1.1944],
+ device='cuda:0'), covar=tensor([0.1747, 0.1336, 0.1432, 0.1767, 0.2329, 0.2120, 0.1001, 0.2188],
+ device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0210, 0.0170, 0.0205, 0.0201, 0.0187, 0.0156, 0.0188],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-28 01:21:07,212 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152589.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:21:35,749 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5055, 1.3545, 1.8026, 1.8085, 1.3395, 1.2475, 1.4374, 0.8543],
+ device='cuda:0'), covar=tensor([0.0503, 0.0740, 0.0369, 0.0554, 0.0790, 0.1132, 0.0638, 0.0673],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0066, 0.0069, 0.0075, 0.0094, 0.0072, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:21:38,621 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4235, 1.0805, 4.4682, 4.2104, 3.8347, 4.2099, 4.1155, 3.9115],
+ device='cuda:0'), covar=tensor([0.7485, 0.6244, 0.0965, 0.1504, 0.1070, 0.1735, 0.1782, 0.1608],
+ device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0310, 0.0412, 0.0410, 0.0350, 0.0419, 0.0321, 0.0367],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:21:47,190 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8820, 1.2558, 4.8550, 4.5996, 4.1545, 4.6070, 4.3108, 4.2810],
+ device='cuda:0'), covar=tensor([0.7068, 0.6207, 0.1016, 0.1650, 0.1167, 0.1651, 0.1855, 0.1728],
+ device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0310, 0.0412, 0.0409, 0.0350, 0.0419, 0.0321, 0.0367],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:21:49,538 INFO [finetune.py:976] (0/7) Epoch 27, batch 3700, loss[loss=0.1617, simple_loss=0.243, pruned_loss=0.0402, over 4925.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2398, pruned_loss=0.04657, over 954633.56 frames. ], batch size: 29, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:22:00,786 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152629.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:22:18,235 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-04-28 01:22:21,237 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152643.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:22:31,077 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152650.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:22:56,766 INFO [finetune.py:976] (0/7) Epoch 27, batch 3750, loss[loss=0.1728, simple_loss=0.2435, pruned_loss=0.05106, over 4059.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2418, pruned_loss=0.04708, over 954389.91 frames. ], batch size: 65, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:23:04,996 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.032e+02 1.529e+02 1.747e+02 2.097e+02 4.287e+02, threshold=3.495e+02, percent-clipped=2.0
+2023-04-28 01:24:08,007 INFO [finetune.py:976] (0/7) Epoch 27, batch 3800, loss[loss=0.173, simple_loss=0.2467, pruned_loss=0.04969, over 4890.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2428, pruned_loss=0.04733, over 954326.41 frames. ], batch size: 35, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:24:11,822 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=152726.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:24:20,665 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.44 vs. limit=5.0
+2023-04-28 01:25:13,179 INFO [finetune.py:976] (0/7) Epoch 27, batch 3850, loss[loss=0.1692, simple_loss=0.2397, pruned_loss=0.04933, over 4270.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2414, pruned_loss=0.04638, over 955613.98 frames. ], batch size: 66, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:25:15,593 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.111e+02 1.420e+02 1.784e+02 2.045e+02 3.500e+02, threshold=3.567e+02, percent-clipped=1.0
+2023-04-28 01:25:33,562 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=152787.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:25:37,833 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9404, 3.5565, 1.0094, 2.0554, 2.0905, 2.5054, 2.1023, 1.0491],
+ device='cuda:0'), covar=tensor([0.1161, 0.0877, 0.1884, 0.1164, 0.0938, 0.0978, 0.1362, 0.1779],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0238, 0.0134, 0.0120, 0.0131, 0.0152, 0.0116, 0.0117],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:25:47,902 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152801.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:26:17,504 INFO [finetune.py:976] (0/7) Epoch 27, batch 3900, loss[loss=0.1508, simple_loss=0.2215, pruned_loss=0.04007, over 4915.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2391, pruned_loss=0.04628, over 954416.53 frames. ], batch size: 46, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:27:04,530 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5100, 1.4403, 1.8194, 1.8264, 1.3619, 1.2474, 1.5248, 1.0521],
+ device='cuda:0'), covar=tensor([0.0553, 0.0697, 0.0337, 0.0512, 0.0751, 0.1072, 0.0589, 0.0509],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0066, 0.0069, 0.0075, 0.0094, 0.0072, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:27:05,740 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2960, 2.1561, 2.5093, 2.8887, 2.8113, 2.2746, 1.9744, 2.5458],
+ device='cuda:0'), covar=tensor([0.0855, 0.1118, 0.0686, 0.0564, 0.0599, 0.0857, 0.0768, 0.0567],
+ device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0199, 0.0182, 0.0169, 0.0176, 0.0177, 0.0150, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:27:22,908 INFO [finetune.py:976] (0/7) Epoch 27, batch 3950, loss[loss=0.1487, simple_loss=0.214, pruned_loss=0.04167, over 4926.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2368, pruned_loss=0.04575, over 955814.80 frames. ], batch size: 33, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:27:26,326 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.447e+02 1.748e+02 2.157e+02 5.230e+02, threshold=3.496e+02, percent-clipped=2.0
+2023-04-28 01:27:36,693 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152881.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:27:37,438 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.02 vs. limit=5.0
+2023-04-28 01:27:59,532 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6696, 1.2219, 1.3737, 1.4778, 1.3656, 1.5136, 1.3904, 1.4053],
+ device='cuda:0'), covar=tensor([0.2376, 0.3164, 0.2951, 0.2979, 0.3497, 0.4210, 0.3019, 0.2655],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0374, 0.0329, 0.0340, 0.0349, 0.0394, 0.0360, 0.0333],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 01:28:27,783 INFO [finetune.py:976] (0/7) Epoch 27, batch 4000, loss[loss=0.115, simple_loss=0.1838, pruned_loss=0.02305, over 4676.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2363, pruned_loss=0.04569, over 955550.42 frames. ], batch size: 23, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:28:30,317 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.86 vs. limit=5.0
+2023-04-28 01:28:39,696 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=152929.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:28:39,722 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=152929.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:28:50,571 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152938.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:29:00,129 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=152945.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:29:03,965 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0
+2023-04-28 01:29:32,517 INFO [finetune.py:976] (0/7) Epoch 27, batch 4050, loss[loss=0.1823, simple_loss=0.2785, pruned_loss=0.04303, over 4871.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2388, pruned_loss=0.04685, over 954665.87 frames. ], batch size: 44, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:29:35,455 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.138e+02 1.640e+02 1.918e+02 2.261e+02 4.754e+02, threshold=3.835e+02, percent-clipped=2.0
+2023-04-28 01:29:43,789 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=152977.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:30:36,405 INFO [finetune.py:976] (0/7) Epoch 27, batch 4100, loss[loss=0.1485, simple_loss=0.2152, pruned_loss=0.0409, over 4726.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2412, pruned_loss=0.04734, over 954352.14 frames. ], batch size: 23, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:31:39,170 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5648, 3.1007, 0.8684, 1.7728, 1.6904, 2.3196, 1.8659, 1.1258],
+ device='cuda:0'), covar=tensor([0.1488, 0.1395, 0.2229, 0.1695, 0.1294, 0.1198, 0.1685, 0.2152],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0239, 0.0135, 0.0121, 0.0132, 0.0152, 0.0117, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:31:40,931 INFO [finetune.py:976] (0/7) Epoch 27, batch 4150, loss[loss=0.1914, simple_loss=0.2671, pruned_loss=0.05786, over 4904.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.242, pruned_loss=0.04748, over 954656.24 frames. ], batch size: 36, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:31:43,380 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.659e+02 1.933e+02 2.318e+02 5.641e+02, threshold=3.866e+02, percent-clipped=2.0
+2023-04-28 01:31:54,615 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153082.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:32:12,235 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7866, 1.3383, 1.8941, 2.3114, 1.8648, 1.7465, 1.7923, 1.7476],
+ device='cuda:0'), covar=tensor([0.5162, 0.7454, 0.6836, 0.5778, 0.6522, 0.8858, 0.8588, 0.9221],
+ device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0423, 0.0519, 0.0508, 0.0471, 0.0507, 0.0508, 0.0524],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:32:22,875 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153101.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:32:43,316 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153117.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:32:45,074 INFO [finetune.py:976] (0/7) Epoch 27, batch 4200, loss[loss=0.1408, simple_loss=0.2198, pruned_loss=0.03091, over 4765.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2422, pruned_loss=0.04709, over 956153.74 frames. ], batch size: 26, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:33:25,749 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.76 vs. limit=5.0
+2023-04-28 01:33:26,253 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=153149.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:33:37,853 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5535, 1.2240, 4.1526, 3.9543, 3.5729, 3.8015, 3.8254, 3.6605],
+ device='cuda:0'), covar=tensor([0.6781, 0.5829, 0.1062, 0.1321, 0.1058, 0.1737, 0.1964, 0.1354],
+ device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0308, 0.0409, 0.0407, 0.0348, 0.0417, 0.0319, 0.0364],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:33:46,401 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-28 01:33:49,691 INFO [finetune.py:976] (0/7) Epoch 27, batch 4250, loss[loss=0.171, simple_loss=0.2446, pruned_loss=0.04874, over 4898.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2394, pruned_loss=0.04594, over 957454.06 frames. ], batch size: 35, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:33:57,255 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.090e+02 1.449e+02 1.716e+02 2.053e+02 3.736e+02, threshold=3.432e+02, percent-clipped=0.0
+2023-04-28 01:33:59,824 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153178.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:34:09,513 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.64 vs. limit=5.0
+2023-04-28 01:34:54,745 INFO [finetune.py:976] (0/7) Epoch 27, batch 4300, loss[loss=0.1519, simple_loss=0.2248, pruned_loss=0.03949, over 4815.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2371, pruned_loss=0.0456, over 955897.48 frames. ], batch size: 51, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:35:06,053 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-04-28 01:35:17,350 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153238.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:35:23,562 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153245.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:35:38,705 INFO [finetune.py:976] (0/7) Epoch 27, batch 4350, loss[loss=0.1085, simple_loss=0.1789, pruned_loss=0.01907, over 4785.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2338, pruned_loss=0.04438, over 955034.69 frames. ], batch size: 26, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:35:41,101 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.135e+02 1.585e+02 1.848e+02 2.377e+02 5.336e+02, threshold=3.696e+02, percent-clipped=3.0
+2023-04-28 01:35:44,154 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153278.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:35:48,967 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=153286.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:35:54,189 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=153293.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:36:02,967 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2207, 1.5255, 1.3402, 1.7537, 1.6757, 1.9554, 1.3836, 3.6369],
+ device='cuda:0'), covar=tensor([0.0616, 0.0808, 0.0811, 0.1197, 0.0626, 0.0524, 0.0733, 0.0120],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 01:36:12,550 INFO [finetune.py:976] (0/7) Epoch 27, batch 4400, loss[loss=0.1444, simple_loss=0.2134, pruned_loss=0.03771, over 4752.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.235, pruned_loss=0.04491, over 957212.24 frames. ], batch size: 23, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:36:13,928 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1900, 1.6347, 2.0662, 2.5028, 2.0164, 1.6122, 1.3193, 1.8674],
+ device='cuda:0'), covar=tensor([0.3067, 0.2894, 0.1580, 0.1898, 0.2477, 0.2488, 0.4050, 0.1844],
+ device='cuda:0'), in_proj_covar=tensor([0.0294, 0.0246, 0.0229, 0.0315, 0.0222, 0.0235, 0.0229, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 01:36:33,319 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153339.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:37:16,504 INFO [finetune.py:976] (0/7) Epoch 27, batch 4450, loss[loss=0.1476, simple_loss=0.2096, pruned_loss=0.04276, over 4038.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2369, pruned_loss=0.04517, over 957490.11 frames. ], batch size: 17, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:37:18,874 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.171e+02 1.598e+02 1.820e+02 2.307e+02 3.312e+02, threshold=3.640e+02, percent-clipped=0.0
+2023-04-28 01:37:30,071 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153382.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:38:10,738 INFO [finetune.py:976] (0/7) Epoch 27, batch 4500, loss[loss=0.1643, simple_loss=0.2462, pruned_loss=0.0412, over 4898.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2387, pruned_loss=0.04592, over 953697.60 frames. ], batch size: 35, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:38:11,463 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153421.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:38:16,905 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=153430.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:38:34,958 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-28 01:38:43,733 INFO [finetune.py:976] (0/7) Epoch 27, batch 4550, loss[loss=0.1728, simple_loss=0.2549, pruned_loss=0.04531, over 4904.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.24, pruned_loss=0.04575, over 955238.77 frames. ], batch size: 37, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:38:45,574 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153473.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:38:46,112 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.179e+01 1.577e+02 1.859e+02 2.196e+02 3.775e+02, threshold=3.717e+02, percent-clipped=3.0
+2023-04-28 01:38:51,091 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153482.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:38:59,546 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3870, 1.7426, 1.8155, 1.9190, 1.8076, 1.8055, 1.8986, 1.8594],
+ device='cuda:0'), covar=tensor([0.4116, 0.5736, 0.4715, 0.4429, 0.5459, 0.7044, 0.5377, 0.4987],
+ device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0374, 0.0330, 0.0340, 0.0350, 0.0393, 0.0360, 0.0334],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 01:39:15,748 INFO [finetune.py:976] (0/7) Epoch 27, batch 4600, loss[loss=0.1861, simple_loss=0.2499, pruned_loss=0.06117, over 4718.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2401, pruned_loss=0.04572, over 954791.24 frames. ], batch size: 54, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:39:32,192 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9588, 1.5404, 1.4715, 1.7729, 2.0944, 1.7064, 1.5389, 1.4129],
+ device='cuda:0'), covar=tensor([0.1574, 0.1433, 0.2045, 0.1205, 0.0994, 0.1729, 0.2061, 0.2639],
+ device='cuda:0'), in_proj_covar=tensor([0.0319, 0.0311, 0.0356, 0.0289, 0.0331, 0.0309, 0.0304, 0.0380],
+ device='cuda:0'), out_proj_covar=tensor([6.5036e-05, 6.3828e-05, 7.4717e-05, 5.7889e-05, 6.7666e-05, 6.4471e-05,
+ 6.3026e-05, 8.0407e-05], device='cuda:0')
+2023-04-28 01:39:44,492 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.75 vs. limit=2.0
+2023-04-28 01:39:48,983 INFO [finetune.py:976] (0/7) Epoch 27, batch 4650, loss[loss=0.1197, simple_loss=0.193, pruned_loss=0.02321, over 4804.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.238, pruned_loss=0.04524, over 955120.20 frames. ], batch size: 25, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:39:51,379 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.905e+01 1.489e+02 1.829e+02 2.275e+02 4.563e+02, threshold=3.657e+02, percent-clipped=2.0
+2023-04-28 01:40:01,106 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3876, 1.6806, 1.4462, 1.8423, 1.8173, 2.0495, 1.5223, 3.6203],
+ device='cuda:0'), covar=tensor([0.0562, 0.0756, 0.0768, 0.1131, 0.0586, 0.0536, 0.0703, 0.0132],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 01:40:08,343 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9842, 1.0136, 1.2270, 1.1949, 1.0113, 0.9179, 0.9670, 0.4744],
+ device='cuda:0'), covar=tensor([0.0517, 0.0597, 0.0437, 0.0467, 0.0684, 0.1132, 0.0460, 0.0677],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0066, 0.0069, 0.0075, 0.0095, 0.0072, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:40:27,427 INFO [finetune.py:976] (0/7) Epoch 27, batch 4700, loss[loss=0.148, simple_loss=0.2132, pruned_loss=0.04141, over 4817.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2354, pruned_loss=0.04471, over 954729.97 frames. ], batch size: 25, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:40:38,813 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1285, 1.8714, 2.3963, 2.6160, 2.2049, 2.0699, 2.2523, 2.1666],
+ device='cuda:0'), covar=tensor([0.4612, 0.7319, 0.6949, 0.5661, 0.6328, 0.9104, 0.9082, 0.9946],
+ device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0424, 0.0519, 0.0508, 0.0471, 0.0508, 0.0509, 0.0525],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:40:41,148 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153634.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:40:50,991 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6553, 0.9955, 1.7150, 2.1611, 1.7674, 1.6008, 1.6509, 1.6317],
+ device='cuda:0'), covar=tensor([0.4132, 0.6787, 0.5822, 0.5215, 0.5339, 0.7258, 0.6996, 0.8580],
+ device='cuda:0'), in_proj_covar=tensor([0.0443, 0.0424, 0.0519, 0.0508, 0.0470, 0.0508, 0.0509, 0.0525],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:41:06,299 INFO [finetune.py:976] (0/7) Epoch 27, batch 4750, loss[loss=0.1746, simple_loss=0.2427, pruned_loss=0.05328, over 4766.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2341, pruned_loss=0.04458, over 953714.39 frames. ], batch size: 27, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:41:08,702 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.046e+02 1.498e+02 1.801e+02 2.144e+02 5.776e+02, threshold=3.603e+02, percent-clipped=1.0
+2023-04-28 01:41:39,700 INFO [finetune.py:976] (0/7) Epoch 27, batch 4800, loss[loss=0.1584, simple_loss=0.2237, pruned_loss=0.04649, over 4413.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2377, pruned_loss=0.0458, over 953967.08 frames. ], batch size: 18, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:41:50,076 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6280, 0.9593, 1.2757, 1.1571, 1.6288, 1.2778, 1.0527, 1.3043],
+ device='cuda:0'), covar=tensor([0.1649, 0.1712, 0.2015, 0.1421, 0.0950, 0.1701, 0.2129, 0.2733],
+ device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0309, 0.0353, 0.0287, 0.0329, 0.0307, 0.0303, 0.0378],
+ device='cuda:0'), out_proj_covar=tensor([6.4459e-05, 6.3479e-05, 7.4094e-05, 5.7482e-05, 6.7267e-05, 6.4067e-05,
+ 6.2638e-05, 8.0015e-05], device='cuda:0')
+2023-04-28 01:42:13,212 INFO [finetune.py:976] (0/7) Epoch 27, batch 4850, loss[loss=0.2247, simple_loss=0.2729, pruned_loss=0.08827, over 4823.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2403, pruned_loss=0.04643, over 955089.94 frames. ], batch size: 40, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:42:15,104 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153773.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:42:16,120 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.558e+02 1.785e+02 2.146e+02 3.572e+02, threshold=3.570e+02, percent-clipped=0.0
+2023-04-28 01:42:17,988 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=153777.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:42:58,243 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2298, 1.7321, 2.1139, 2.5573, 2.1053, 1.6520, 1.4171, 1.9416],
+ device='cuda:0'), covar=tensor([0.2979, 0.2979, 0.1609, 0.2166, 0.2512, 0.2545, 0.4175, 0.1963],
+ device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0247, 0.0229, 0.0316, 0.0223, 0.0235, 0.0229, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 01:43:05,705 INFO [finetune.py:976] (0/7) Epoch 27, batch 4900, loss[loss=0.195, simple_loss=0.2669, pruned_loss=0.06151, over 4892.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2416, pruned_loss=0.04684, over 954955.31 frames. ], batch size: 35, lr: 2.91e-03, grad_scale: 32.0
+2023-04-28 01:43:06,382 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=153821.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:43:13,723 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0816, 1.4753, 1.2264, 1.7239, 1.5924, 1.7251, 1.3363, 3.3429],
+ device='cuda:0'), covar=tensor([0.0678, 0.0861, 0.0871, 0.1228, 0.0665, 0.0523, 0.0781, 0.0173],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0055],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 01:43:26,471 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8410, 1.1093, 3.2739, 3.0473, 2.9324, 3.1777, 3.1840, 2.9033],
+ device='cuda:0'), covar=tensor([0.7322, 0.5463, 0.1463, 0.2030, 0.1288, 0.1849, 0.1771, 0.1653],
+ device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0308, 0.0407, 0.0406, 0.0347, 0.0415, 0.0316, 0.0362],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:44:09,304 INFO [finetune.py:976] (0/7) Epoch 27, batch 4950, loss[loss=0.1581, simple_loss=0.211, pruned_loss=0.05261, over 4090.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2423, pruned_loss=0.04674, over 955028.68 frames. ], batch size: 17, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:44:18,030 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.601e+02 1.869e+02 2.254e+02 5.628e+02, threshold=3.738e+02, percent-clipped=5.0
+2023-04-28 01:44:32,881 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.53 vs. limit=5.0
+2023-04-28 01:45:02,659 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.94 vs. limit=2.0
+2023-04-28 01:45:13,176 INFO [finetune.py:976] (0/7) Epoch 27, batch 5000, loss[loss=0.1867, simple_loss=0.2536, pruned_loss=0.05987, over 4751.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2409, pruned_loss=0.0468, over 953520.91 frames. ], batch size: 59, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:45:15,663 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=153923.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:45:34,543 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=153934.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:45:44,073 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6911, 1.7462, 0.8976, 1.3894, 1.9030, 1.5652, 1.4805, 1.5646],
+ device='cuda:0'), covar=tensor([0.0462, 0.0353, 0.0312, 0.0533, 0.0261, 0.0489, 0.0450, 0.0561],
+ device='cuda:0'), in_proj_covar=tensor([0.0027, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0052],
+ device='cuda:0')
+2023-04-28 01:46:15,780 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-28 01:46:17,425 INFO [finetune.py:976] (0/7) Epoch 27, batch 5050, loss[loss=0.1491, simple_loss=0.2197, pruned_loss=0.03924, over 4764.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.238, pruned_loss=0.04628, over 956210.90 frames. ], batch size: 27, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:46:25,382 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.912e+01 1.615e+02 1.861e+02 2.291e+02 3.934e+02, threshold=3.722e+02, percent-clipped=1.0
+2023-04-28 01:46:32,334 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=153982.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:46:33,615 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=153984.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:46:37,294 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.29 vs. limit=2.0
+2023-04-28 01:46:44,001 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-154000.pt
+2023-04-28 01:46:57,620 INFO [finetune.py:976] (0/7) Epoch 27, batch 5100, loss[loss=0.1619, simple_loss=0.2211, pruned_loss=0.05133, over 4726.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2351, pruned_loss=0.04551, over 956440.06 frames. ], batch size: 23, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:47:08,928 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.05 vs. limit=5.0
+2023-04-28 01:47:18,491 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154049.0, num_to_drop=1, layers_to_drop={0}
+2023-04-28 01:47:31,249 INFO [finetune.py:976] (0/7) Epoch 27, batch 5150, loss[loss=0.1661, simple_loss=0.2448, pruned_loss=0.04366, over 4806.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2354, pruned_loss=0.04568, over 957919.17 frames. ], batch size: 45, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:47:33,691 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.527e+01 1.424e+02 1.690e+02 2.200e+02 4.419e+02, threshold=3.381e+02, percent-clipped=1.0
+2023-04-28 01:47:35,063 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154076.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:47:35,616 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154077.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:47:36,867 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4937, 2.1921, 1.7927, 1.8998, 2.2001, 1.8804, 2.5466, 1.6482],
+ device='cuda:0'), covar=tensor([0.3119, 0.1593, 0.4187, 0.2462, 0.1611, 0.2038, 0.1508, 0.3969],
+ device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0353, 0.0421, 0.0349, 0.0378, 0.0373, 0.0367, 0.0419],
+ device='cuda:0'), out_proj_covar=tensor([9.9436e-05, 1.0497e-04, 1.2750e-04, 1.0450e-04, 1.1184e-04, 1.1091e-04,
+ 1.0718e-04, 1.2597e-04], device='cuda:0')
+2023-04-28 01:47:40,841 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154084.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:48:13,704 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0
+2023-04-28 01:48:15,452 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154110.0, num_to_drop=1, layers_to_drop={3}
+2023-04-28 01:48:16,048 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0071, 2.4104, 2.0328, 1.9265, 1.6079, 1.6485, 2.0924, 1.5303],
+ device='cuda:0'), covar=tensor([0.1633, 0.1290, 0.1427, 0.1494, 0.2233, 0.1805, 0.0967, 0.1945],
+ device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0211, 0.0170, 0.0206, 0.0202, 0.0187, 0.0157, 0.0189],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-28 01:48:26,894 INFO [finetune.py:976] (0/7) Epoch 27, batch 5200, loss[loss=0.1588, simple_loss=0.2515, pruned_loss=0.03309, over 4816.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2391, pruned_loss=0.04672, over 955912.97 frames. ], batch size: 51, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:48:35,423 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=154125.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:48:47,713 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6544, 3.3040, 2.7889, 3.1881, 2.3814, 2.9757, 2.9496, 2.2626],
+ device='cuda:0'), covar=tensor([0.1846, 0.1060, 0.0816, 0.1068, 0.2655, 0.1018, 0.1537, 0.2607],
+ device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0300, 0.0215, 0.0275, 0.0313, 0.0252, 0.0246, 0.0262],
+ device='cuda:0'), out_proj_covar=tensor([1.1258e-04, 1.1813e-04, 8.4566e-05, 1.0820e-04, 1.2612e-04, 9.9188e-05,
+ 9.9078e-05, 1.0318e-04], device='cuda:0')
+2023-04-28 01:48:49,943 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154137.0, num_to_drop=1, layers_to_drop={0}
+2023-04-28 01:49:00,249 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154145.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:49:31,679 INFO [finetune.py:976] (0/7) Epoch 27, batch 5250, loss[loss=0.1582, simple_loss=0.244, pruned_loss=0.03619, over 4862.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2414, pruned_loss=0.04711, over 954934.68 frames. ], batch size: 31, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:49:34,130 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.088e+02 1.655e+02 1.963e+02 2.223e+02 5.480e+02, threshold=3.927e+02, percent-clipped=5.0
+2023-04-28 01:50:15,421 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4840, 1.7792, 1.9611, 2.0167, 1.8705, 1.9112, 2.0023, 2.0041],
+ device='cuda:0'), covar=tensor([0.3694, 0.5405, 0.4307, 0.4233, 0.5355, 0.6829, 0.5032, 0.4673],
+ device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0375, 0.0330, 0.0341, 0.0351, 0.0394, 0.0361, 0.0334],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 01:50:25,596 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.13 vs. limit=2.0
+2023-04-28 01:50:35,790 INFO [finetune.py:976] (0/7) Epoch 27, batch 5300, loss[loss=0.1987, simple_loss=0.2817, pruned_loss=0.05783, over 4830.00 frames. ], tot_loss[loss=0.17, simple_loss=0.2441, pruned_loss=0.04798, over 956587.36 frames. ], batch size: 47, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:50:36,512 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154221.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:50:48,386 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8076, 4.0482, 0.8838, 2.0652, 2.3020, 2.6325, 2.2354, 0.9439],
+ device='cuda:0'), covar=tensor([0.1344, 0.0862, 0.1967, 0.1212, 0.1000, 0.1063, 0.1483, 0.2267],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0238, 0.0135, 0.0120, 0.0131, 0.0152, 0.0117, 0.0117],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:51:41,568 INFO [finetune.py:976] (0/7) Epoch 27, batch 5350, loss[loss=0.155, simple_loss=0.2281, pruned_loss=0.04097, over 4890.00 frames. ], tot_loss[loss=0.17, simple_loss=0.244, pruned_loss=0.04805, over 955045.79 frames. ], batch size: 32, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:51:49,114 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.757e+01 1.510e+02 1.847e+02 2.221e+02 4.452e+02, threshold=3.694e+02, percent-clipped=2.0
+2023-04-28 01:51:52,263 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154279.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:51:54,121 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154282.0, num_to_drop=1, layers_to_drop={0}
+2023-04-28 01:51:55,987 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0539, 2.6664, 2.2926, 2.5444, 1.8453, 2.3616, 2.1959, 1.7793],
+ device='cuda:0'), covar=tensor([0.2053, 0.1201, 0.0776, 0.1152, 0.3282, 0.1035, 0.1876, 0.2406],
+ device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0300, 0.0215, 0.0275, 0.0313, 0.0253, 0.0246, 0.0261],
+ device='cuda:0'), out_proj_covar=tensor([1.1242e-04, 1.1811e-04, 8.4489e-05, 1.0814e-04, 1.2598e-04, 9.9416e-05,
+ 9.9270e-05, 1.0290e-04], device='cuda:0')
+2023-04-28 01:51:56,600 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154286.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:52:00,131 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154291.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:52:05,777 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7728, 1.2967, 1.9063, 2.2939, 1.8899, 1.7932, 1.8381, 1.7557],
+ device='cuda:0'), covar=tensor([0.4393, 0.6648, 0.5279, 0.5205, 0.5683, 0.7222, 0.7020, 0.8917],
+ device='cuda:0'), in_proj_covar=tensor([0.0441, 0.0423, 0.0517, 0.0506, 0.0472, 0.0507, 0.0508, 0.0524],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:52:19,625 INFO [finetune.py:976] (0/7) Epoch 27, batch 5400, loss[loss=0.1428, simple_loss=0.213, pruned_loss=0.03631, over 4771.00 frames. ], tot_loss[loss=0.1682, simple_loss=0.2414, pruned_loss=0.04753, over 955967.48 frames. ], batch size: 28, lr: 2.90e-03, grad_scale: 64.0
+2023-04-28 01:52:30,051 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154337.0, num_to_drop=1, layers_to_drop={1}
+2023-04-28 01:52:37,196 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154347.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:52:41,235 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154352.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:52:52,200 INFO [finetune.py:976] (0/7) Epoch 27, batch 5450, loss[loss=0.1543, simple_loss=0.2287, pruned_loss=0.04002, over 4929.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2377, pruned_loss=0.04623, over 955954.11 frames. ], batch size: 38, lr: 2.90e-03, grad_scale: 64.0
+2023-04-28 01:52:54,630 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.131e+02 1.557e+02 1.812e+02 2.063e+02 3.462e+02, threshold=3.624e+02, percent-clipped=0.0
+2023-04-28 01:53:09,464 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154398.0, num_to_drop=1, layers_to_drop={1}
+2023-04-28 01:53:10,584 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5196, 1.7635, 1.7916, 2.1365, 2.0688, 2.3673, 1.8171, 4.6410],
+ device='cuda:0'), covar=tensor([0.0522, 0.0786, 0.0736, 0.1148, 0.0605, 0.0437, 0.0699, 0.0089],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0040, 0.0037, 0.0038, 0.0054],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 01:53:16,760 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154405.0, num_to_drop=1, layers_to_drop={3}
+2023-04-28 01:53:31,464 INFO [finetune.py:976] (0/7) Epoch 27, batch 5500, loss[loss=0.1607, simple_loss=0.229, pruned_loss=0.04622, over 4809.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2347, pruned_loss=0.04514, over 954455.19 frames. ], batch size: 25, lr: 2.90e-03, grad_scale: 64.0
+2023-04-28 01:53:44,361 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154432.0, num_to_drop=1, layers_to_drop={1}
+2023-04-28 01:53:54,047 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154440.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:54:08,420 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.77 vs. limit=2.0
+2023-04-28 01:54:35,187 INFO [finetune.py:976] (0/7) Epoch 27, batch 5550, loss[loss=0.1458, simple_loss=0.2198, pruned_loss=0.03587, over 4767.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2362, pruned_loss=0.04524, over 955499.96 frames. ], batch size: 27, lr: 2.90e-03, grad_scale: 64.0
+2023-04-28 01:54:37,625 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.014e+02 1.508e+02 1.736e+02 2.106e+02 4.174e+02, threshold=3.472e+02, percent-clipped=1.0
+2023-04-28 01:54:37,758 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154474.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:54:49,270 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6693, 1.4430, 1.6515, 1.9851, 1.9704, 1.5931, 1.4143, 1.7604],
+ device='cuda:0'), covar=tensor([0.0793, 0.1369, 0.0820, 0.0604, 0.0662, 0.0831, 0.0759, 0.0602],
+ device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0203, 0.0185, 0.0172, 0.0179, 0.0179, 0.0152, 0.0179],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:54:57,043 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.36 vs. limit=2.0
+2023-04-28 01:55:31,960 INFO [finetune.py:976] (0/7) Epoch 27, batch 5600, loss[loss=0.1922, simple_loss=0.2801, pruned_loss=0.05214, over 4811.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2395, pruned_loss=0.04611, over 954696.83 frames. ], batch size: 41, lr: 2.90e-03, grad_scale: 64.0
+2023-04-28 01:55:50,545 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154535.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:56:35,597 INFO [finetune.py:976] (0/7) Epoch 27, batch 5650, loss[loss=0.1924, simple_loss=0.2769, pruned_loss=0.0539, over 4814.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2422, pruned_loss=0.04623, over 956855.34 frames. ], batch size: 39, lr: 2.90e-03, grad_scale: 64.0
+2023-04-28 01:56:42,718 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.767e+01 1.509e+02 1.920e+02 2.295e+02 5.018e+02, threshold=3.840e+02, percent-clipped=4.0
+2023-04-28 01:56:42,883 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0
+2023-04-28 01:56:44,487 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154577.0, num_to_drop=1, layers_to_drop={3}
+2023-04-28 01:56:45,670 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154579.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:57:18,294 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.4349, 2.9382, 2.7899, 2.8596, 2.7027, 2.8784, 2.8856, 2.8516],
+ device='cuda:0'), covar=tensor([0.2840, 0.4775, 0.4443, 0.4092, 0.4973, 0.5533, 0.4875, 0.4766],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0375, 0.0330, 0.0341, 0.0351, 0.0394, 0.0361, 0.0334],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 01:57:24,517 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.49 vs. limit=2.0
+2023-04-28 01:57:36,100 INFO [finetune.py:976] (0/7) Epoch 27, batch 5700, loss[loss=0.1222, simple_loss=0.19, pruned_loss=0.02725, over 3866.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2388, pruned_loss=0.04627, over 934380.07 frames. ], batch size: 17, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:57:45,550 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=154627.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:57:46,193 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154628.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:57:59,104 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154642.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:58:02,950 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-27.pt
+2023-04-28 01:58:12,013 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154647.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:58:12,561 INFO [finetune.py:976] (0/7) Epoch 28, batch 0, loss[loss=0.1655, simple_loss=0.241, pruned_loss=0.04497, over 4878.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.241, pruned_loss=0.04497, over 4878.00 frames. ], batch size: 32, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:58:12,562 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-28 01:58:17,458 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5691, 1.2818, 1.3518, 1.3450, 1.7325, 1.4465, 1.1931, 1.3439],
+ device='cuda:0'), covar=tensor([0.1992, 0.1405, 0.2025, 0.1465, 0.0965, 0.1545, 0.1967, 0.2414],
+ device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0306, 0.0350, 0.0286, 0.0325, 0.0304, 0.0300, 0.0375],
+ device='cuda:0'), out_proj_covar=tensor([6.3781e-05, 6.2941e-05, 7.3412e-05, 5.7246e-05, 6.6436e-05, 6.3401e-05,
+ 6.2023e-05, 7.9490e-05], device='cuda:0')
+2023-04-28 01:58:29,429 INFO [finetune.py:1010] (0/7) Epoch 28, validation: loss=0.1549, simple_loss=0.224, pruned_loss=0.04297, over 2265189.00 frames.
+2023-04-28 01:58:29,430 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-28 01:58:33,227 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1806, 2.6570, 1.0926, 1.5002, 1.9508, 1.3753, 3.4906, 2.1029],
+ device='cuda:0'), covar=tensor([0.0658, 0.0602, 0.0798, 0.1171, 0.0555, 0.0975, 0.0269, 0.0553],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0064, 0.0047, 0.0046, 0.0050, 0.0051, 0.0073, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-28 01:58:43,434 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6435, 3.4214, 1.0975, 1.9203, 1.9390, 2.5556, 2.0360, 1.3320],
+ device='cuda:0'), covar=tensor([0.1685, 0.1395, 0.2269, 0.1750, 0.1282, 0.1253, 0.1745, 0.1848],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0240, 0.0136, 0.0121, 0.0132, 0.0152, 0.0118, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 01:58:55,769 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.710e+01 1.461e+02 1.801e+02 2.323e+02 7.600e+02, threshold=3.601e+02, percent-clipped=3.0
+2023-04-28 01:59:03,269 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8182, 1.6987, 1.9149, 2.1651, 2.2010, 1.7845, 1.6575, 1.9465],
+ device='cuda:0'), covar=tensor([0.0835, 0.1154, 0.0701, 0.0558, 0.0616, 0.0845, 0.0683, 0.0526],
+ device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0204, 0.0186, 0.0173, 0.0180, 0.0179, 0.0153, 0.0180],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:59:04,493 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154689.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:59:06,856 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154693.0, num_to_drop=1, layers_to_drop={2}
+2023-04-28 01:59:09,838 INFO [finetune.py:976] (0/7) Epoch 28, batch 50, loss[loss=0.1557, simple_loss=0.2362, pruned_loss=0.03756, over 4922.00 frames. ], tot_loss[loss=0.1715, simple_loss=0.2436, pruned_loss=0.04966, over 216085.52 frames. ], batch size: 33, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:59:17,114 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154705.0, num_to_drop=1, layers_to_drop={2}
+2023-04-28 01:59:20,233 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4650, 1.7666, 1.9388, 2.0484, 1.9212, 1.9489, 2.0371, 2.0185],
+ device='cuda:0'), covar=tensor([0.3532, 0.4947, 0.4036, 0.4099, 0.5037, 0.6479, 0.4672, 0.4120],
+ device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0374, 0.0329, 0.0341, 0.0350, 0.0394, 0.0360, 0.0333],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 01:59:26,321 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154720.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:59:33,609 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154732.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:59:38,579 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154740.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 01:59:43,316 INFO [finetune.py:976] (0/7) Epoch 28, batch 100, loss[loss=0.1269, simple_loss=0.201, pruned_loss=0.02637, over 4898.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.2366, pruned_loss=0.0455, over 382071.25 frames. ], batch size: 43, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 01:59:46,226 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0188, 1.1762, 3.1964, 2.7578, 2.8697, 2.9988, 3.0770, 2.6930],
+ device='cuda:0'), covar=tensor([0.9030, 0.7517, 0.2504, 0.3995, 0.2808, 0.3448, 0.3325, 0.3751],
+ device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0311, 0.0409, 0.0411, 0.0351, 0.0419, 0.0320, 0.0367],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 01:59:48,359 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=154753.0, num_to_drop=1, layers_to_drop={1}
+2023-04-28 01:59:50,138 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9945, 1.8062, 1.9945, 2.2875, 2.4099, 1.8497, 1.5689, 2.0414],
+ device='cuda:0'), covar=tensor([0.0768, 0.1130, 0.0755, 0.0661, 0.0520, 0.0798, 0.0714, 0.0557],
+ device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0203, 0.0185, 0.0172, 0.0179, 0.0179, 0.0152, 0.0179],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 02:00:02,206 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.686e+01 1.538e+02 1.845e+02 2.159e+02 3.671e+02, threshold=3.690e+02, percent-clipped=1.0
+2023-04-28 02:00:05,342 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=154780.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 02:00:05,994 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=154781.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 02:00:10,225 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=154788.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 02:00:16,273 INFO [finetune.py:976] (0/7) Epoch 28, batch 150, loss[loss=0.1836, simple_loss=0.2497, pruned_loss=0.05878, over 4895.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.233, pruned_loss=0.04424, over 510635.74 frames. ], batch size: 35, lr: 2.90e-03, grad_scale: 32.0
+2023-04-28 02:00:21,913 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.44 vs. limit=2.0
+2023-04-28 02:00:29,325 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9557, 2.4966, 1.9496, 2.0662, 1.4078, 1.4281, 2.0343, 1.3459],
+ device='cuda:0'), covar=tensor([0.1534, 0.1326, 0.1305, 0.1500, 0.2217, 0.1828, 0.0859, 0.1947],
+ device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0210, 0.0171, 0.0205, 0.0201, 0.0187, 0.0157, 0.0189],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-28 02:00:30,064 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0
+2023-04-28 02:00:31,775 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0
+2023-04-28 02:00:38,339 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154830.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 02:00:49,253 INFO [finetune.py:976] (0/7) Epoch 28, batch 200, loss[loss=0.1518, simple_loss=0.2372, pruned_loss=0.0332, over 4772.00 frames. ], tot_loss[loss=0.1583, simple_loss=0.23, pruned_loss=0.04334, over 610701.49 frames. ], batch size: 28, lr: 2.89e-03, grad_scale: 32.0
+2023-04-28 02:01:08,233 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.084e+02 1.505e+02 1.793e+02 2.296e+02 3.844e+02, threshold=3.586e+02, percent-clipped=2.0
+2023-04-28 02:01:09,540 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154877.0, num_to_drop=1, layers_to_drop={2}
+2023-04-28 02:01:22,149 INFO [finetune.py:976] (0/7) Epoch 28, batch 250, loss[loss=0.1421, simple_loss=0.2042, pruned_loss=0.03996, over 4701.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.236, pruned_loss=0.04556, over 687574.17 frames. ], batch size: 23, lr: 2.89e-03, grad_scale: 32.0
+2023-04-28 02:01:35,635 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.66 vs. limit=5.0
+2023-04-28 02:01:41,470 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=154925.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 02:01:51,914 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154942.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 02:01:54,966 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154947.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 02:02:00,647 INFO [finetune.py:976] (0/7) Epoch 28, batch 300, loss[loss=0.1512, simple_loss=0.2346, pruned_loss=0.03385, over 4840.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2395, pruned_loss=0.04641, over 748411.60 frames.
], batch size: 49, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:02:11,859 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6854, 1.5730, 1.2521, 1.6549, 1.9900, 1.5711, 1.4165, 1.1943], + device='cuda:0'), covar=tensor([0.1903, 0.1398, 0.1965, 0.1287, 0.0851, 0.1937, 0.2206, 0.2703], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0308, 0.0351, 0.0286, 0.0326, 0.0305, 0.0301, 0.0376], + device='cuda:0'), out_proj_covar=tensor([6.4256e-05, 6.3268e-05, 7.3557e-05, 5.7292e-05, 6.6496e-05, 6.3549e-05, + 6.2309e-05, 7.9693e-05], device='cuda:0') +2023-04-28 02:02:36,612 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.547e+02 1.784e+02 2.160e+02 3.859e+02, threshold=3.568e+02, percent-clipped=1.0 +2023-04-28 02:02:37,342 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=154976.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:02:47,503 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=154984.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:02:56,113 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=154990.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:02:57,997 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=154993.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 02:02:59,161 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=154995.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:03:06,551 INFO [finetune.py:976] (0/7) Epoch 28, batch 350, loss[loss=0.1442, simple_loss=0.2326, pruned_loss=0.02786, over 4868.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2411, pruned_loss=0.04662, over 795013.73 frames. ], batch size: 34, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:03:28,789 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5016, 1.9361, 2.4332, 3.1260, 2.3782, 1.8455, 1.8427, 2.4006], + device='cuda:0'), covar=tensor([0.2883, 0.3036, 0.1560, 0.1972, 0.2565, 0.2536, 0.3524, 0.1804], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0248, 0.0230, 0.0316, 0.0224, 0.0237, 0.0230, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 02:03:33,787 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9725, 1.6003, 4.0820, 3.8535, 3.5706, 3.7672, 3.6586, 3.6724], + device='cuda:0'), covar=tensor([0.6260, 0.4952, 0.1122, 0.1548, 0.1089, 0.1637, 0.4568, 0.1367], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0310, 0.0408, 0.0410, 0.0350, 0.0418, 0.0319, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:03:45,311 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155037.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:03:47,613 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=155041.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 02:03:51,699 INFO [finetune.py:976] (0/7) Epoch 28, batch 400, loss[loss=0.1552, simple_loss=0.2254, pruned_loss=0.04252, over 4755.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2424, pruned_loss=0.04708, over 829777.34 frames. 
], batch size: 27, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:04:06,194 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155069.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:04:11,280 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.524e+02 1.961e+02 2.438e+02 3.962e+02, threshold=3.922e+02, percent-clipped=3.0 +2023-04-28 02:04:11,989 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155076.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:04:13,377 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.74 vs. limit=5.0 +2023-04-28 02:04:25,389 INFO [finetune.py:976] (0/7) Epoch 28, batch 450, loss[loss=0.1378, simple_loss=0.2219, pruned_loss=0.02685, over 4818.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2405, pruned_loss=0.0462, over 855657.59 frames. ], batch size: 33, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:04:48,043 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155130.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:04:48,090 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155130.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:04:57,388 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.02 vs. limit=5.0 +2023-04-28 02:04:58,936 INFO [finetune.py:976] (0/7) Epoch 28, batch 500, loss[loss=0.1378, simple_loss=0.21, pruned_loss=0.03281, over 4780.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2382, pruned_loss=0.04569, over 877053.10 frames. ], batch size: 26, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:05:15,803 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.7968, 1.7280, 1.8002, 1.3918, 1.8467, 1.4564, 2.3445, 1.5730], + device='cuda:0'), covar=tensor([0.3888, 0.1937, 0.4860, 0.2976, 0.1673, 0.2451, 0.1674, 0.4646], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0357, 0.0424, 0.0352, 0.0381, 0.0377, 0.0372, 0.0423], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:05:17,835 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.043e+02 1.583e+02 1.761e+02 2.203e+02 6.801e+02, threshold=3.523e+02, percent-clipped=1.0 +2023-04-28 02:05:20,240 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=155178.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:05:21,499 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3688, 1.8893, 1.8186, 2.2068, 2.1896, 2.2617, 1.8141, 4.6132], + device='cuda:0'), covar=tensor([0.0543, 0.0757, 0.0737, 0.1131, 0.0567, 0.0449, 0.0650, 0.0108], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0040, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 02:05:32,273 INFO [finetune.py:976] (0/7) Epoch 28, batch 550, loss[loss=0.1755, simple_loss=0.2337, pruned_loss=0.05861, over 4935.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2345, pruned_loss=0.04455, over 893893.26 frames. 
], batch size: 33, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:05:44,719 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6346, 1.4742, 1.8866, 2.0443, 1.4549, 1.3527, 1.6168, 0.9316], + device='cuda:0'), covar=tensor([0.0543, 0.0664, 0.0345, 0.0488, 0.0761, 0.1215, 0.0569, 0.0618], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0066, 0.0069, 0.0075, 0.0095, 0.0073, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 02:06:05,935 INFO [finetune.py:976] (0/7) Epoch 28, batch 600, loss[loss=0.231, simple_loss=0.3051, pruned_loss=0.07845, over 4815.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2364, pruned_loss=0.0459, over 907335.06 frames. ], batch size: 51, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:06:23,749 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.531e+01 1.578e+02 2.066e+02 2.357e+02 4.358e+02, threshold=4.132e+02, percent-clipped=3.0 +2023-04-28 02:06:30,725 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155284.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:06:39,098 INFO [finetune.py:976] (0/7) Epoch 28, batch 650, loss[loss=0.1656, simple_loss=0.2425, pruned_loss=0.04439, over 4915.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2383, pruned_loss=0.04609, over 914723.64 frames. ], batch size: 36, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:06:41,046 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2503, 1.8131, 2.0636, 2.5931, 2.6115, 2.0761, 1.8904, 2.4190], + device='cuda:0'), covar=tensor([0.0790, 0.1297, 0.0846, 0.0639, 0.0577, 0.0904, 0.0726, 0.0530], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0201, 0.0183, 0.0170, 0.0177, 0.0176, 0.0150, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:07:02,743 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=155332.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:07:02,753 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155332.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:07:12,880 INFO [finetune.py:976] (0/7) Epoch 28, batch 700, loss[loss=0.1834, simple_loss=0.2586, pruned_loss=0.05408, over 4894.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2394, pruned_loss=0.04634, over 923459.63 frames. ], batch size: 32, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:07:18,032 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.48 vs. limit=2.0 +2023-04-28 02:07:34,633 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.108e+02 1.574e+02 1.852e+02 2.166e+02 4.284e+02, threshold=3.703e+02, percent-clipped=1.0 +2023-04-28 02:07:40,191 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155376.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:08:09,455 INFO [finetune.py:976] (0/7) Epoch 28, batch 750, loss[loss=0.1843, simple_loss=0.2701, pruned_loss=0.0492, over 4902.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2417, pruned_loss=0.04719, over 930311.62 frames. 
], batch size: 36, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:08:41,159 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=155424.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:08:41,841 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155425.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:09:12,223 INFO [finetune.py:976] (0/7) Epoch 28, batch 800, loss[loss=0.1546, simple_loss=0.2068, pruned_loss=0.05118, over 4053.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2418, pruned_loss=0.0472, over 933942.55 frames. ], batch size: 17, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:09:22,991 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155457.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:09:42,706 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.932e+01 1.490e+02 1.749e+02 2.216e+02 3.602e+02, threshold=3.499e+02, percent-clipped=0.0 +2023-04-28 02:09:50,975 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2028, 1.4922, 1.4121, 1.7207, 1.6248, 1.6481, 1.3659, 2.9962], + device='cuda:0'), covar=tensor([0.0641, 0.0858, 0.0793, 0.1200, 0.0623, 0.0496, 0.0778, 0.0212], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0040, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 02:09:59,777 INFO [finetune.py:976] (0/7) Epoch 28, batch 850, loss[loss=0.1817, simple_loss=0.2406, pruned_loss=0.06143, over 4873.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.24, pruned_loss=0.04682, over 938001.18 frames. ], batch size: 32, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:10:12,000 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155518.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:10:32,814 INFO [finetune.py:976] (0/7) Epoch 28, batch 900, loss[loss=0.1636, simple_loss=0.2287, pruned_loss=0.04926, over 4058.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2372, pruned_loss=0.04588, over 941940.76 frames. ], batch size: 17, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:10:49,542 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.500e+01 1.455e+02 1.735e+02 2.204e+02 5.182e+02, threshold=3.469e+02, percent-clipped=3.0 +2023-04-28 02:10:52,693 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6144, 1.3930, 0.5785, 1.3046, 1.3438, 1.4704, 1.3759, 1.3906], + device='cuda:0'), covar=tensor([0.0475, 0.0381, 0.0378, 0.0554, 0.0303, 0.0504, 0.0484, 0.0564], + device='cuda:0'), in_proj_covar=tensor([0.0027, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 02:11:05,995 INFO [finetune.py:976] (0/7) Epoch 28, batch 950, loss[loss=0.138, simple_loss=0.2093, pruned_loss=0.03339, over 4818.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2347, pruned_loss=0.04508, over 945009.39 frames. 
], batch size: 25, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:11:10,949 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155605.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:11:19,471 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0647, 1.3369, 1.2789, 1.5088, 1.4025, 1.4444, 1.2816, 2.4081], + device='cuda:0'), covar=tensor([0.0640, 0.0860, 0.0777, 0.1306, 0.0699, 0.0543, 0.0788, 0.0241], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0042, 0.0040, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 02:11:27,352 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155632.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:11:39,449 INFO [finetune.py:976] (0/7) Epoch 28, batch 1000, loss[loss=0.2133, simple_loss=0.2871, pruned_loss=0.06979, over 4810.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2379, pruned_loss=0.04636, over 948067.80 frames. ], batch size: 45, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:11:45,557 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155657.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:11:51,055 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155666.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:11:56,469 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.942e+01 1.547e+02 1.789e+02 2.038e+02 3.472e+02, threshold=3.578e+02, percent-clipped=1.0 +2023-04-28 02:11:57,805 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1016, 0.7582, 0.9263, 0.8005, 1.1970, 0.9343, 0.8817, 0.9697], + device='cuda:0'), covar=tensor([0.1755, 0.1662, 0.2028, 0.1662, 0.1098, 0.1455, 0.1612, 0.2398], + device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0308, 0.0352, 0.0286, 0.0326, 0.0306, 0.0301, 0.0377], + device='cuda:0'), out_proj_covar=tensor([6.4443e-05, 6.3347e-05, 7.3907e-05, 5.7337e-05, 6.6549e-05, 6.3708e-05, + 6.2246e-05, 7.9734e-05], device='cuda:0') +2023-04-28 02:11:59,551 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=155680.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:12:12,444 INFO [finetune.py:976] (0/7) Epoch 28, batch 1050, loss[loss=0.1653, simple_loss=0.2147, pruned_loss=0.05802, over 4560.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2415, pruned_loss=0.04733, over 951321.03 frames. ], batch size: 20, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:12:25,715 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155718.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:12:29,947 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=155725.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:12:45,200 INFO [finetune.py:976] (0/7) Epoch 28, batch 1100, loss[loss=0.1614, simple_loss=0.2427, pruned_loss=0.04007, over 4785.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2422, pruned_loss=0.04701, over 952269.75 frames. 
], batch size: 29, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:12:48,435 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5814, 1.3061, 4.4982, 4.2219, 3.8883, 4.2710, 4.1404, 4.0203], + device='cuda:0'), covar=tensor([0.7253, 0.5947, 0.1160, 0.1793, 0.1181, 0.1454, 0.1449, 0.1540], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0312, 0.0410, 0.0411, 0.0352, 0.0420, 0.0321, 0.0367], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:13:13,273 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=155773.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:13:13,335 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4282, 1.2644, 1.8281, 1.7704, 1.2757, 1.1428, 1.4874, 1.0764], + device='cuda:0'), covar=tensor([0.0592, 0.0679, 0.0375, 0.0629, 0.0722, 0.1112, 0.0575, 0.0555], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0066, 0.0069, 0.0075, 0.0094, 0.0073, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 02:13:19,856 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.115e+02 1.609e+02 1.883e+02 2.305e+02 3.828e+02, threshold=3.767e+02, percent-clipped=2.0 +2023-04-28 02:13:30,940 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=155784.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:13:46,059 INFO [finetune.py:976] (0/7) Epoch 28, batch 1150, loss[loss=0.1899, simple_loss=0.2544, pruned_loss=0.06269, over 4738.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2428, pruned_loss=0.04667, over 954201.17 frames. ], batch size: 54, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:13:58,498 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155813.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:14:18,107 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=155845.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 02:14:19,787 INFO [finetune.py:976] (0/7) Epoch 28, batch 1200, loss[loss=0.1586, simple_loss=0.228, pruned_loss=0.0446, over 4819.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2409, pruned_loss=0.04613, over 953662.52 frames. ], batch size: 33, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:14:54,753 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.082e+02 1.563e+02 1.837e+02 2.158e+02 4.161e+02, threshold=3.673e+02, percent-clipped=1.0 +2023-04-28 02:15:25,826 INFO [finetune.py:976] (0/7) Epoch 28, batch 1250, loss[loss=0.1416, simple_loss=0.2135, pruned_loss=0.03487, over 4849.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2382, pruned_loss=0.0454, over 955143.88 frames. ], batch size: 49, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:15:51,900 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0 +2023-04-28 02:16:21,813 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.86 vs. 
limit=5.0 +2023-04-28 02:16:24,795 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8602, 2.4365, 2.0416, 1.8274, 1.3507, 1.4253, 2.1321, 1.3673], + device='cuda:0'), covar=tensor([0.1826, 0.1435, 0.1363, 0.1769, 0.2327, 0.2043, 0.0881, 0.2114], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0209, 0.0170, 0.0204, 0.0200, 0.0186, 0.0156, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 02:16:33,390 INFO [finetune.py:976] (0/7) Epoch 28, batch 1300, loss[loss=0.1228, simple_loss=0.2001, pruned_loss=0.02277, over 4745.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2352, pruned_loss=0.04488, over 955602.47 frames. ], batch size: 27, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:16:53,061 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=155961.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:17:07,930 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.397e+01 1.571e+02 1.820e+02 2.327e+02 4.182e+02, threshold=3.640e+02, percent-clipped=2.0 +2023-04-28 02:17:37,315 INFO [finetune.py:976] (0/7) Epoch 28, batch 1350, loss[loss=0.1967, simple_loss=0.2679, pruned_loss=0.06273, over 4688.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2367, pruned_loss=0.04576, over 956310.41 frames. ], batch size: 23, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:17:38,684 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-156000.pt +2023-04-28 02:17:56,783 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156013.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:18:40,867 INFO [finetune.py:976] (0/7) Epoch 28, batch 1400, loss[loss=0.1301, simple_loss=0.2006, pruned_loss=0.02985, over 4725.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2402, pruned_loss=0.04695, over 955009.20 frames. ], batch size: 23, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:19:12,178 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156070.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:19:20,862 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.674e+02 1.966e+02 2.354e+02 4.293e+02, threshold=3.933e+02, percent-clipped=2.0 +2023-04-28 02:19:33,036 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.50 vs. limit=2.0 +2023-04-28 02:19:36,468 INFO [finetune.py:976] (0/7) Epoch 28, batch 1450, loss[loss=0.1638, simple_loss=0.2355, pruned_loss=0.04601, over 4744.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2423, pruned_loss=0.04719, over 955670.08 frames. ], batch size: 27, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:19:46,164 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156113.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:19:59,735 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156131.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 02:20:05,151 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156140.0, num_to_drop=1, layers_to_drop={3} +2023-04-28 02:20:09,927 INFO [finetune.py:976] (0/7) Epoch 28, batch 1500, loss[loss=0.1876, simple_loss=0.2528, pruned_loss=0.06124, over 4822.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2421, pruned_loss=0.04678, over 955229.60 frames. 
], batch size: 30, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:20:23,235 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=156161.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:20:27,884 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9234, 1.8489, 2.0134, 2.4121, 2.5057, 1.9809, 1.7942, 2.0479], + device='cuda:0'), covar=tensor([0.0941, 0.1066, 0.0756, 0.0572, 0.0594, 0.0817, 0.0712, 0.0659], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0202, 0.0184, 0.0170, 0.0178, 0.0177, 0.0150, 0.0178], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:20:43,692 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.067e+02 1.591e+02 1.859e+02 2.303e+02 4.450e+02, threshold=3.717e+02, percent-clipped=1.0 +2023-04-28 02:21:08,785 INFO [finetune.py:976] (0/7) Epoch 28, batch 1550, loss[loss=0.1863, simple_loss=0.2621, pruned_loss=0.0552, over 4933.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2422, pruned_loss=0.04656, over 956040.47 frames. ], batch size: 33, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:21:10,299 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5300, 1.8927, 1.7107, 2.3770, 2.5332, 2.0376, 2.0260, 1.8104], + device='cuda:0'), covar=tensor([0.1722, 0.1739, 0.1897, 0.1561, 0.1184, 0.1854, 0.2299, 0.2563], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0307, 0.0351, 0.0285, 0.0325, 0.0306, 0.0301, 0.0376], + device='cuda:0'), out_proj_covar=tensor([6.4283e-05, 6.2959e-05, 7.3680e-05, 5.6984e-05, 6.6261e-05, 6.3808e-05, + 6.2190e-05, 7.9722e-05], device='cuda:0') +2023-04-28 02:21:31,126 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156216.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:22:04,802 INFO [finetune.py:976] (0/7) Epoch 28, batch 1600, loss[loss=0.1717, simple_loss=0.2367, pruned_loss=0.05332, over 4826.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2392, pruned_loss=0.04556, over 955051.05 frames. ], batch size: 39, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:22:12,819 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156261.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:22:24,230 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.128e+02 1.624e+02 1.857e+02 2.215e+02 3.679e+02, threshold=3.713e+02, percent-clipped=0.0 +2023-04-28 02:22:25,628 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156277.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:22:26,443 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.95 vs. limit=2.0 +2023-04-28 02:22:34,024 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156290.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:22:38,827 INFO [finetune.py:976] (0/7) Epoch 28, batch 1650, loss[loss=0.122, simple_loss=0.2015, pruned_loss=0.02124, over 4817.00 frames. ], tot_loss[loss=0.163, simple_loss=0.2368, pruned_loss=0.04463, over 954538.63 frames. 
], batch size: 38, lr: 2.89e-03, grad_scale: 32.0 +2023-04-28 02:22:45,568 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=156309.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:22:48,046 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156313.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:23:00,989 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2462, 1.3865, 1.7062, 1.8198, 1.7508, 1.8171, 1.7145, 1.7587], + device='cuda:0'), covar=tensor([0.3424, 0.5080, 0.4059, 0.3826, 0.4953, 0.6478, 0.4861, 0.4271], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0375, 0.0330, 0.0341, 0.0349, 0.0391, 0.0360, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 02:23:12,278 INFO [finetune.py:976] (0/7) Epoch 28, batch 1700, loss[loss=0.2167, simple_loss=0.2695, pruned_loss=0.08194, over 4820.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2345, pruned_loss=0.04391, over 952679.78 frames. ], batch size: 33, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:23:14,243 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156351.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:23:20,258 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=156361.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:23:21,591 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5093, 1.7112, 1.4324, 1.1697, 1.1667, 1.1479, 1.4844, 1.1201], + device='cuda:0'), covar=tensor([0.1812, 0.1345, 0.1462, 0.1653, 0.2270, 0.1934, 0.0982, 0.2083], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0209, 0.0169, 0.0204, 0.0200, 0.0185, 0.0156, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 02:23:28,665 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.471e+01 1.503e+02 1.857e+02 2.212e+02 3.931e+02, threshold=3.714e+02, percent-clipped=1.0 +2023-04-28 02:23:45,148 INFO [finetune.py:976] (0/7) Epoch 28, batch 1750, loss[loss=0.1891, simple_loss=0.2598, pruned_loss=0.05924, over 4747.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2377, pruned_loss=0.04566, over 953657.91 frames. ], batch size: 54, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:24:08,044 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156426.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 02:24:29,101 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156440.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 02:24:39,113 INFO [finetune.py:976] (0/7) Epoch 28, batch 1800, loss[loss=0.2443, simple_loss=0.3054, pruned_loss=0.09159, over 4906.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2398, pruned_loss=0.04583, over 954400.24 frames. ], batch size: 36, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:25:11,114 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.353e+01 1.610e+02 1.909e+02 2.260e+02 4.171e+02, threshold=3.817e+02, percent-clipped=3.0 +2023-04-28 02:25:31,031 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=156488.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:25:42,450 INFO [finetune.py:976] (0/7) Epoch 28, batch 1850, loss[loss=0.1283, simple_loss=0.1959, pruned_loss=0.03031, over 4770.00 frames. 
], tot_loss[loss=0.167, simple_loss=0.2411, pruned_loss=0.04646, over 954110.93 frames. ], batch size: 26, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:26:48,597 INFO [finetune.py:976] (0/7) Epoch 28, batch 1900, loss[loss=0.167, simple_loss=0.2335, pruned_loss=0.05029, over 4115.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2418, pruned_loss=0.04675, over 953711.11 frames. ], batch size: 65, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:27:20,581 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156572.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:27:21,874 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7052, 1.5209, 1.7267, 2.0457, 2.0500, 1.6999, 1.4147, 1.8766], + device='cuda:0'), covar=tensor([0.0877, 0.1387, 0.0899, 0.0593, 0.0691, 0.0906, 0.0771, 0.0579], + device='cuda:0'), in_proj_covar=tensor([0.0186, 0.0204, 0.0186, 0.0172, 0.0180, 0.0179, 0.0152, 0.0179], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:27:22,348 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.009e+02 1.575e+02 1.976e+02 2.372e+02 4.648e+02, threshold=3.953e+02, percent-clipped=2.0 +2023-04-28 02:27:24,345 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9284, 2.4917, 2.1449, 1.9118, 1.3674, 1.4742, 2.2355, 1.4258], + device='cuda:0'), covar=tensor([0.1492, 0.1326, 0.1208, 0.1501, 0.2124, 0.1722, 0.0790, 0.1837], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0209, 0.0169, 0.0204, 0.0200, 0.0185, 0.0155, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 02:27:54,788 INFO [finetune.py:976] (0/7) Epoch 28, batch 1950, loss[loss=0.151, simple_loss=0.2212, pruned_loss=0.04043, over 4753.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2395, pruned_loss=0.04572, over 953423.79 frames. ], batch size: 27, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:29:00,496 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=156646.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:29:01,645 INFO [finetune.py:976] (0/7) Epoch 28, batch 2000, loss[loss=0.1653, simple_loss=0.2458, pruned_loss=0.04235, over 4802.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2375, pruned_loss=0.04548, over 954245.97 frames. 
], batch size: 29, lr: 2.88e-03, grad_scale: 64.0 +2023-04-28 02:29:30,598 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8845, 1.4880, 1.9390, 2.3631, 2.0017, 1.8375, 1.8706, 1.8469], + device='cuda:0'), covar=tensor([0.4664, 0.6798, 0.6698, 0.5491, 0.5813, 0.8277, 0.8021, 0.9414], + device='cuda:0'), in_proj_covar=tensor([0.0444, 0.0424, 0.0520, 0.0509, 0.0473, 0.0510, 0.0511, 0.0527], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:29:34,748 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.047e+02 1.509e+02 1.833e+02 2.149e+02 4.980e+02, threshold=3.665e+02, percent-clipped=1.0 +2023-04-28 02:29:54,401 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7827, 1.4380, 1.9427, 2.0821, 1.6311, 1.5007, 1.6309, 1.1263], + device='cuda:0'), covar=tensor([0.0494, 0.0819, 0.0463, 0.0542, 0.0754, 0.1096, 0.0687, 0.0617], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0068, 0.0066, 0.0069, 0.0075, 0.0095, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 02:30:06,049 INFO [finetune.py:976] (0/7) Epoch 28, batch 2050, loss[loss=0.1491, simple_loss=0.2268, pruned_loss=0.03567, over 4700.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.235, pruned_loss=0.04494, over 953725.64 frames. ], batch size: 23, lr: 2.88e-03, grad_scale: 64.0 +2023-04-28 02:30:35,514 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7967, 1.2778, 1.8497, 2.2411, 1.8971, 1.7249, 1.7725, 1.7617], + device='cuda:0'), covar=tensor([0.4312, 0.6796, 0.5902, 0.5469, 0.5467, 0.7695, 0.8053, 0.8465], + device='cuda:0'), in_proj_covar=tensor([0.0445, 0.0424, 0.0520, 0.0508, 0.0473, 0.0510, 0.0512, 0.0527], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:30:43,822 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156726.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 02:31:05,851 INFO [finetune.py:976] (0/7) Epoch 28, batch 2100, loss[loss=0.1911, simple_loss=0.2655, pruned_loss=0.05833, over 4852.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2366, pruned_loss=0.04576, over 954505.37 frames. ], batch size: 44, lr: 2.88e-03, grad_scale: 64.0 +2023-04-28 02:31:07,078 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156749.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:31:18,127 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.58 vs. limit=5.0 +2023-04-28 02:31:39,409 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=156774.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:31:39,962 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.904e+01 1.566e+02 1.862e+02 2.081e+02 4.387e+02, threshold=3.725e+02, percent-clipped=1.0 +2023-04-28 02:32:10,049 INFO [finetune.py:976] (0/7) Epoch 28, batch 2150, loss[loss=0.1723, simple_loss=0.2344, pruned_loss=0.05512, over 4801.00 frames. ], tot_loss[loss=0.166, simple_loss=0.239, pruned_loss=0.04644, over 954368.75 frames. 
], batch size: 25, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:32:22,729 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156810.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:32:46,425 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.96 vs. limit=2.0 +2023-04-28 02:32:48,110 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6934, 1.9847, 0.9780, 1.4477, 2.1347, 1.5274, 1.4302, 1.5860], + device='cuda:0'), covar=tensor([0.0470, 0.0336, 0.0304, 0.0509, 0.0243, 0.0460, 0.0470, 0.0536], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0047, 0.0039, 0.0054, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 02:32:57,254 INFO [finetune.py:976] (0/7) Epoch 28, batch 2200, loss[loss=0.1762, simple_loss=0.258, pruned_loss=0.04727, over 4926.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2406, pruned_loss=0.04674, over 951889.46 frames. ], batch size: 38, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:33:14,513 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0572, 1.7449, 2.1993, 2.4435, 2.0841, 1.9322, 2.0723, 1.9952], + device='cuda:0'), covar=tensor([0.4570, 0.7940, 0.7396, 0.5844, 0.6133, 0.9069, 0.9038, 1.0893], + device='cuda:0'), in_proj_covar=tensor([0.0446, 0.0425, 0.0522, 0.0509, 0.0475, 0.0512, 0.0512, 0.0529], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:33:15,048 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156872.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:33:17,875 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.069e+02 1.564e+02 1.773e+02 2.100e+02 3.301e+02, threshold=3.547e+02, percent-clipped=0.0 +2023-04-28 02:33:23,526 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=156885.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:33:31,444 INFO [finetune.py:976] (0/7) Epoch 28, batch 2250, loss[loss=0.1335, simple_loss=0.211, pruned_loss=0.02802, over 4739.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2407, pruned_loss=0.04643, over 954464.32 frames. ], batch size: 23, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:33:47,965 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=156920.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:34:04,139 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=156946.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:34:04,159 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=156946.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:34:05,299 INFO [finetune.py:976] (0/7) Epoch 28, batch 2300, loss[loss=0.2024, simple_loss=0.2684, pruned_loss=0.06821, over 4812.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2413, pruned_loss=0.04679, over 952747.57 frames. 
], batch size: 39, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:34:11,236 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6165, 1.7273, 0.7095, 1.3153, 1.6900, 1.4745, 1.3343, 1.4509], + device='cuda:0'), covar=tensor([0.0489, 0.0344, 0.0346, 0.0544, 0.0271, 0.0507, 0.0512, 0.0568], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 02:34:25,206 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.091e+02 1.469e+02 1.728e+02 2.151e+02 3.571e+02, threshold=3.456e+02, percent-clipped=1.0 +2023-04-28 02:34:36,687 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=156994.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:34:39,084 INFO [finetune.py:976] (0/7) Epoch 28, batch 2350, loss[loss=0.1714, simple_loss=0.2458, pruned_loss=0.04851, over 4901.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2387, pruned_loss=0.04604, over 951908.18 frames. ], batch size: 37, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:34:47,982 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157010.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:35:23,600 INFO [finetune.py:976] (0/7) Epoch 28, batch 2400, loss[loss=0.1441, simple_loss=0.2228, pruned_loss=0.03273, over 4723.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2378, pruned_loss=0.04664, over 953144.30 frames. ], batch size: 23, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:35:25,514 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3638, 1.0622, 3.7477, 3.3083, 3.3367, 3.5550, 3.5546, 3.1319], + device='cuda:0'), covar=tensor([0.9449, 0.8239, 0.2107, 0.3420, 0.2225, 0.3743, 0.2681, 0.3345], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0308, 0.0407, 0.0407, 0.0347, 0.0415, 0.0318, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:35:50,224 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157071.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:35:53,121 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.080e+02 1.553e+02 1.893e+02 2.191e+02 4.408e+02, threshold=3.786e+02, percent-clipped=4.0 +2023-04-28 02:36:07,063 INFO [finetune.py:976] (0/7) Epoch 28, batch 2450, loss[loss=0.2321, simple_loss=0.2893, pruned_loss=0.08751, over 4917.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2348, pruned_loss=0.04565, over 951868.51 frames. 
], batch size: 36, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:36:11,407 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157105.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:36:17,403 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7457, 2.7984, 2.3893, 2.5156, 2.8551, 2.5033, 3.7295, 2.3650], + device='cuda:0'), covar=tensor([0.3656, 0.2129, 0.3657, 0.3274, 0.1695, 0.2464, 0.1289, 0.3641], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0355, 0.0424, 0.0351, 0.0382, 0.0377, 0.0371, 0.0423], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:36:58,139 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6561, 1.7304, 1.5813, 1.2373, 1.2466, 1.2594, 1.5988, 1.2039], + device='cuda:0'), covar=tensor([0.1793, 0.1493, 0.1519, 0.1696, 0.2337, 0.2014, 0.1052, 0.2171], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0209, 0.0170, 0.0205, 0.0200, 0.0186, 0.0156, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 02:36:58,725 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157142.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:36:59,967 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6902, 2.0811, 1.8338, 2.0754, 1.3926, 1.7859, 1.9307, 1.4116], + device='cuda:0'), covar=tensor([0.2439, 0.1757, 0.1242, 0.1616, 0.3901, 0.1543, 0.1951, 0.2588], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0302, 0.0217, 0.0276, 0.0315, 0.0254, 0.0248, 0.0263], + device='cuda:0'), out_proj_covar=tensor([1.1280e-04, 1.1865e-04, 8.5321e-05, 1.0868e-04, 1.2703e-04, 9.9690e-05, + 9.9918e-05, 1.0382e-04], device='cuda:0') +2023-04-28 02:37:00,658 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-28 02:37:07,637 INFO [finetune.py:976] (0/7) Epoch 28, batch 2500, loss[loss=0.2166, simple_loss=0.3019, pruned_loss=0.06564, over 4799.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2377, pruned_loss=0.04677, over 950691.02 frames. ], batch size: 51, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:37:19,150 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7601, 1.8943, 1.0060, 1.4424, 2.0326, 1.6591, 1.5287, 1.5625], + device='cuda:0'), covar=tensor([0.0466, 0.0352, 0.0307, 0.0539, 0.0245, 0.0488, 0.0477, 0.0533], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0047, 0.0039, 0.0054, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 02:37:44,333 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.608e+02 1.980e+02 2.407e+02 6.066e+02, threshold=3.960e+02, percent-clipped=5.0 +2023-04-28 02:38:14,042 INFO [finetune.py:976] (0/7) Epoch 28, batch 2550, loss[loss=0.1711, simple_loss=0.2656, pruned_loss=0.03834, over 4901.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.242, pruned_loss=0.04761, over 950118.64 frames. 
], batch size: 43, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:38:22,899 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157203.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:39:11,685 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157241.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:39:16,445 INFO [finetune.py:976] (0/7) Epoch 28, batch 2600, loss[loss=0.1947, simple_loss=0.2782, pruned_loss=0.05559, over 4819.00 frames. ], tot_loss[loss=0.171, simple_loss=0.2444, pruned_loss=0.04882, over 951208.10 frames. ], batch size: 38, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:39:50,662 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.630e+02 1.851e+02 2.303e+02 4.332e+02, threshold=3.701e+02, percent-clipped=1.0 +2023-04-28 02:39:55,424 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3279, 1.5707, 1.4274, 1.6982, 1.6097, 1.8926, 1.4177, 3.3501], + device='cuda:0'), covar=tensor([0.0570, 0.0739, 0.0737, 0.1078, 0.0590, 0.0566, 0.0704, 0.0138], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0040, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 02:40:05,059 INFO [finetune.py:976] (0/7) Epoch 28, batch 2650, loss[loss=0.1279, simple_loss=0.2001, pruned_loss=0.02785, over 4715.00 frames. ], tot_loss[loss=0.1718, simple_loss=0.2455, pruned_loss=0.04905, over 952665.39 frames. ], batch size: 23, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:40:09,283 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0046, 2.5248, 1.0490, 1.3753, 1.8864, 1.2002, 3.3224, 1.8802], + device='cuda:0'), covar=tensor([0.0742, 0.0566, 0.0818, 0.1246, 0.0531, 0.1033, 0.0252, 0.0577], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 02:40:10,623 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-28 02:40:43,829 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5525, 3.2106, 1.1742, 1.8162, 2.4558, 1.7613, 4.4778, 2.5002], + device='cuda:0'), covar=tensor([0.0645, 0.0629, 0.0819, 0.1233, 0.0532, 0.0925, 0.0257, 0.0546], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 02:40:53,194 INFO [finetune.py:976] (0/7) Epoch 28, batch 2700, loss[loss=0.1635, simple_loss=0.2432, pruned_loss=0.0419, over 4783.00 frames. ], tot_loss[loss=0.1697, simple_loss=0.2433, pruned_loss=0.04802, over 951195.74 frames. 
], batch size: 51, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:40:58,708 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157356.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:41:04,710 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157366.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:41:10,643 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.027e+02 1.425e+02 1.670e+02 2.111e+02 3.242e+02, threshold=3.340e+02, percent-clipped=0.0 +2023-04-28 02:41:15,937 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157382.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:41:26,099 INFO [finetune.py:976] (0/7) Epoch 28, batch 2750, loss[loss=0.159, simple_loss=0.2422, pruned_loss=0.03788, over 4920.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2412, pruned_loss=0.04777, over 952664.98 frames. ], batch size: 38, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:41:30,014 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157404.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 02:41:31,077 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157405.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:41:38,334 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157417.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:41:40,199 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5255, 1.4463, 1.8259, 1.8865, 1.4265, 1.2878, 1.5427, 0.9866], + device='cuda:0'), covar=tensor([0.0580, 0.0558, 0.0352, 0.0534, 0.0649, 0.1085, 0.0545, 0.0583], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0065, 0.0069, 0.0075, 0.0094, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 02:41:45,210 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.58 vs. limit=2.0 +2023-04-28 02:42:02,269 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157443.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:42:05,176 INFO [finetune.py:976] (0/7) Epoch 28, batch 2800, loss[loss=0.1435, simple_loss=0.2221, pruned_loss=0.0324, over 4769.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2376, pruned_loss=0.04668, over 953080.33 frames. 
], batch size: 28, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:42:13,769 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=157453.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:42:26,562 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157465.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 02:42:37,656 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7114, 1.5008, 1.9081, 2.0212, 1.5440, 1.4493, 1.6018, 0.9884], + device='cuda:0'), covar=tensor([0.0501, 0.0649, 0.0368, 0.0504, 0.0642, 0.1115, 0.0532, 0.0623], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0065, 0.0069, 0.0075, 0.0095, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 02:42:38,118 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.163e+02 1.553e+02 1.775e+02 2.169e+02 3.521e+02, threshold=3.549e+02, percent-clipped=1.0 +2023-04-28 02:42:49,254 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8320, 1.6789, 1.7479, 1.4150, 1.8764, 1.5822, 2.3256, 1.5689], + device='cuda:0'), covar=tensor([0.3460, 0.1887, 0.4542, 0.2739, 0.1532, 0.2295, 0.1614, 0.4481], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0356, 0.0426, 0.0351, 0.0383, 0.0378, 0.0373, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:43:07,286 INFO [finetune.py:976] (0/7) Epoch 28, batch 2850, loss[loss=0.164, simple_loss=0.2321, pruned_loss=0.04794, over 4169.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2366, pruned_loss=0.04619, over 953143.62 frames. ], batch size: 65, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:43:07,352 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157498.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:43:17,214 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157506.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 02:43:39,187 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8040, 1.7802, 1.0934, 1.5022, 2.1418, 1.6062, 1.4722, 1.5949], + device='cuda:0'), covar=tensor([0.0465, 0.0379, 0.0271, 0.0530, 0.0230, 0.0462, 0.0470, 0.0552], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 02:43:49,804 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4108, 1.6507, 1.8854, 1.9776, 1.8819, 1.9125, 1.8914, 1.9019], + device='cuda:0'), covar=tensor([0.3559, 0.5281, 0.4393, 0.4275, 0.5375, 0.6946, 0.5037, 0.4670], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0376, 0.0329, 0.0342, 0.0351, 0.0392, 0.0360, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 02:43:58,957 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9664, 1.7339, 2.1384, 2.3040, 2.0312, 1.8748, 2.0227, 1.9127], + device='cuda:0'), covar=tensor([0.4604, 0.7003, 0.6672, 0.5528, 0.5886, 0.8389, 0.8258, 1.0416], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0427, 0.0524, 0.0510, 0.0476, 0.0513, 0.0514, 0.0528], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + 
device='cuda:0') +2023-04-28 02:44:00,030 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157541.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:44:09,939 INFO [finetune.py:976] (0/7) Epoch 28, batch 2900, loss[loss=0.1763, simple_loss=0.2301, pruned_loss=0.0612, over 4698.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2392, pruned_loss=0.04641, over 952969.64 frames. ], batch size: 23, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:44:10,224 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.55 vs. limit=2.0 +2023-04-28 02:44:10,797 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0 +2023-04-28 02:44:32,645 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0 +2023-04-28 02:44:33,702 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157567.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 02:44:34,292 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157568.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:44:44,344 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.234e+02 1.564e+02 1.889e+02 2.208e+02 3.535e+02, threshold=3.777e+02, percent-clipped=0.0 +2023-04-28 02:45:03,636 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=157589.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:45:14,539 INFO [finetune.py:976] (0/7) Epoch 28, batch 2950, loss[loss=0.1512, simple_loss=0.2262, pruned_loss=0.03807, over 4832.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.242, pruned_loss=0.04742, over 953087.37 frames. ], batch size: 30, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:45:17,817 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1738, 1.7042, 1.5974, 1.8930, 1.7327, 1.9947, 1.5114, 3.5988], + device='cuda:0'), covar=tensor([0.0659, 0.0771, 0.0750, 0.1150, 0.0615, 0.0466, 0.0696, 0.0131], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0039, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0013, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 02:45:50,084 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157629.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:46:09,607 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9014, 3.7546, 2.9720, 4.4830, 3.6162, 3.8754, 1.9339, 3.8717], + device='cuda:0'), covar=tensor([0.1616, 0.1282, 0.4246, 0.1046, 0.2340, 0.1542, 0.4928, 0.2257], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0218, 0.0250, 0.0301, 0.0297, 0.0246, 0.0273, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 02:46:18,332 INFO [finetune.py:976] (0/7) Epoch 28, batch 3000, loss[loss=0.1736, simple_loss=0.2489, pruned_loss=0.04917, over 4842.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2432, pruned_loss=0.04748, over 953445.83 frames. 
], batch size: 44, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:46:18,334 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-28 02:46:22,273 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0010, 1.2094, 1.8654, 2.4096, 2.0988, 1.9123, 1.8691, 1.8501], + device='cuda:0'), covar=tensor([0.4718, 0.7419, 0.7467, 0.5709, 0.6683, 0.8287, 0.9224, 0.7872], + device='cuda:0'), in_proj_covar=tensor([0.0447, 0.0425, 0.0522, 0.0509, 0.0475, 0.0512, 0.0513, 0.0527], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:46:34,887 INFO [finetune.py:1010] (0/7) Epoch 28, validation: loss=0.153, simple_loss=0.2217, pruned_loss=0.04213, over 2265189.00 frames. +2023-04-28 02:46:34,888 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-28 02:46:49,097 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157666.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:46:55,059 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.120e+02 1.557e+02 1.829e+02 2.170e+02 4.324e+02, threshold=3.658e+02, percent-clipped=1.0 +2023-04-28 02:47:09,007 INFO [finetune.py:976] (0/7) Epoch 28, batch 3050, loss[loss=0.1754, simple_loss=0.2511, pruned_loss=0.04986, over 4777.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2433, pruned_loss=0.04681, over 954926.21 frames. ], batch size: 26, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:47:10,962 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-28 02:47:27,234 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157712.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:47:28,436 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=157714.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:47:58,440 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157738.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:48:09,325 INFO [finetune.py:976] (0/7) Epoch 28, batch 3100, loss[loss=0.1523, simple_loss=0.2262, pruned_loss=0.03921, over 4719.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2407, pruned_loss=0.04571, over 956774.24 frames. ], batch size: 59, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:48:22,596 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157760.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 02:48:32,272 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.835e+01 1.560e+02 1.859e+02 2.154e+02 3.848e+02, threshold=3.719e+02, percent-clipped=2.0 +2023-04-28 02:48:36,114 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2367, 2.1917, 2.0404, 1.8692, 2.1782, 1.9189, 2.8816, 1.8189], + device='cuda:0'), covar=tensor([0.3284, 0.1846, 0.3469, 0.2826, 0.1714, 0.2325, 0.1132, 0.3829], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0354, 0.0425, 0.0351, 0.0383, 0.0376, 0.0372, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:48:46,289 INFO [finetune.py:976] (0/7) Epoch 28, batch 3150, loss[loss=0.2044, simple_loss=0.2643, pruned_loss=0.0722, over 4753.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2381, pruned_loss=0.04545, over 956657.24 frames. 
], batch size: 27, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:48:46,384 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=157798.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:49:18,154 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=157846.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:49:19,357 INFO [finetune.py:976] (0/7) Epoch 28, batch 3200, loss[loss=0.1929, simple_loss=0.2712, pruned_loss=0.05729, over 4920.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2342, pruned_loss=0.04409, over 955583.63 frames. ], batch size: 43, lr: 2.88e-03, grad_scale: 32.0 +2023-04-28 02:49:22,546 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=157853.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:49:27,985 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157862.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 02:49:38,913 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.062e+02 1.560e+02 1.811e+02 2.192e+02 5.644e+02, threshold=3.622e+02, percent-clipped=2.0 +2023-04-28 02:50:07,753 INFO [finetune.py:976] (0/7) Epoch 28, batch 3250, loss[loss=0.1274, simple_loss=0.2033, pruned_loss=0.02579, over 4826.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2342, pruned_loss=0.04454, over 953242.70 frames. ], batch size: 25, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:50:29,653 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8718, 2.8310, 2.1816, 3.2859, 2.8542, 2.8642, 1.1734, 2.8308], + device='cuda:0'), covar=tensor([0.1947, 0.1558, 0.3647, 0.2825, 0.3334, 0.1992, 0.5410, 0.2780], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0218, 0.0250, 0.0301, 0.0297, 0.0247, 0.0273, 0.0270], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 02:50:29,698 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=157914.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:50:43,178 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=157924.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:51:14,742 INFO [finetune.py:976] (0/7) Epoch 28, batch 3300, loss[loss=0.1742, simple_loss=0.2513, pruned_loss=0.0486, over 4778.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2394, pruned_loss=0.04644, over 951879.92 frames. ], batch size: 54, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:51:14,883 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8778, 2.4871, 1.9045, 1.9350, 1.3339, 1.4152, 1.9758, 1.3481], + device='cuda:0'), covar=tensor([0.1609, 0.1293, 0.1257, 0.1511, 0.2197, 0.1848, 0.0950, 0.1964], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0209, 0.0170, 0.0205, 0.0200, 0.0187, 0.0157, 0.0189], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 02:51:51,135 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.051e+02 1.721e+02 1.967e+02 2.277e+02 5.584e+02, threshold=3.933e+02, percent-clipped=3.0 +2023-04-28 02:52:20,397 INFO [finetune.py:976] (0/7) Epoch 28, batch 3350, loss[loss=0.156, simple_loss=0.2338, pruned_loss=0.03912, over 4789.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2409, pruned_loss=0.04648, over 953370.42 frames. 
], batch size: 29, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:52:21,776 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-158000.pt +2023-04-28 02:52:41,446 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158012.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:52:52,545 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6227, 1.6878, 1.4725, 1.2134, 1.2195, 1.2089, 1.4884, 1.1468], + device='cuda:0'), covar=tensor([0.1827, 0.1383, 0.1534, 0.1700, 0.2333, 0.2015, 0.1101, 0.2140], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0209, 0.0170, 0.0205, 0.0200, 0.0186, 0.0157, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 02:53:05,316 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.1926, 3.2307, 2.4271, 3.7712, 3.1888, 3.2294, 1.4258, 3.1905], + device='cuda:0'), covar=tensor([0.2110, 0.1436, 0.4265, 0.2442, 0.2990, 0.2023, 0.5702, 0.2756], + device='cuda:0'), in_proj_covar=tensor([0.0244, 0.0218, 0.0251, 0.0302, 0.0298, 0.0247, 0.0274, 0.0271], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 02:53:12,148 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158038.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:53:23,377 INFO [finetune.py:976] (0/7) Epoch 28, batch 3400, loss[loss=0.168, simple_loss=0.2513, pruned_loss=0.04234, over 4821.00 frames. ], tot_loss[loss=0.1683, simple_loss=0.2422, pruned_loss=0.04719, over 952446.39 frames. ], batch size: 47, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:53:41,188 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=158060.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:53:41,226 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158060.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 02:53:56,198 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.114e+02 1.582e+02 1.900e+02 2.246e+02 3.787e+02, threshold=3.800e+02, percent-clipped=0.0 +2023-04-28 02:54:10,626 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=158086.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:54:12,544 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1701, 1.6767, 2.0184, 2.4276, 2.0830, 1.6121, 1.3704, 1.8567], + device='cuda:0'), covar=tensor([0.2887, 0.2943, 0.1513, 0.2001, 0.2350, 0.2425, 0.4032, 0.1714], + device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0243, 0.0226, 0.0312, 0.0220, 0.0233, 0.0227, 0.0184], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 02:54:22,522 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7523, 3.7437, 0.8275, 1.9421, 2.0770, 2.5222, 2.0758, 1.1740], + device='cuda:0'), covar=tensor([0.1352, 0.0758, 0.2151, 0.1251, 0.1060, 0.1069, 0.1517, 0.1858], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0240, 0.0135, 0.0121, 0.0132, 0.0153, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 02:54:23,034 INFO [finetune.py:976] (0/7) Epoch 28, batch 3450, loss[loss=0.1301, simple_loss=0.1994, pruned_loss=0.03035, over 4712.00 frames. 
], tot_loss[loss=0.1686, simple_loss=0.2426, pruned_loss=0.04733, over 952670.06 frames. ], batch size: 23, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:54:34,534 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=158108.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 02:54:54,847 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158122.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:55:18,898 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6096, 1.5026, 1.9120, 1.9726, 1.4109, 1.3336, 1.5670, 0.9967], + device='cuda:0'), covar=tensor([0.0505, 0.0651, 0.0404, 0.0558, 0.0706, 0.1030, 0.0595, 0.0587], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0065, 0.0069, 0.0075, 0.0094, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 02:55:29,823 INFO [finetune.py:976] (0/7) Epoch 28, batch 3500, loss[loss=0.1335, simple_loss=0.2037, pruned_loss=0.0316, over 4734.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2401, pruned_loss=0.04676, over 952990.67 frames. ], batch size: 23, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:55:48,555 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158162.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 02:56:07,660 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.474e+02 1.724e+02 2.129e+02 5.829e+02, threshold=3.447e+02, percent-clipped=1.0 +2023-04-28 02:56:12,594 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158183.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:56:19,794 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-28 02:56:23,134 INFO [finetune.py:976] (0/7) Epoch 28, batch 3550, loss[loss=0.1583, simple_loss=0.2361, pruned_loss=0.04023, over 4794.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2377, pruned_loss=0.04607, over 954277.48 frames. ], batch size: 29, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:56:30,173 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158209.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:56:30,774 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=158210.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 02:56:39,860 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158224.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:56:56,634 INFO [finetune.py:976] (0/7) Epoch 28, batch 3600, loss[loss=0.1635, simple_loss=0.2338, pruned_loss=0.04662, over 4831.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.235, pruned_loss=0.04544, over 955181.51 frames. 
], batch size: 39, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:57:11,758 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=158272.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:57:14,138 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.946e+01 1.583e+02 1.935e+02 2.237e+02 3.659e+02, threshold=3.870e+02, percent-clipped=1.0 +2023-04-28 02:57:15,514 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6799, 1.2916, 1.8259, 2.2020, 1.7553, 1.6743, 1.7041, 1.6692], + device='cuda:0'), covar=tensor([0.4301, 0.6573, 0.5709, 0.5112, 0.5584, 0.7643, 0.7422, 0.8689], + device='cuda:0'), in_proj_covar=tensor([0.0445, 0.0425, 0.0520, 0.0507, 0.0473, 0.0510, 0.0512, 0.0525], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:57:29,421 INFO [finetune.py:976] (0/7) Epoch 28, batch 3650, loss[loss=0.183, simple_loss=0.2527, pruned_loss=0.05669, over 4927.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2365, pruned_loss=0.04569, over 955836.98 frames. ], batch size: 33, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:57:45,362 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2637, 2.9412, 1.1372, 1.5276, 2.3554, 1.3061, 3.8020, 1.8636], + device='cuda:0'), covar=tensor([0.0634, 0.0758, 0.0842, 0.1176, 0.0448, 0.0968, 0.0233, 0.0559], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 02:58:02,903 INFO [finetune.py:976] (0/7) Epoch 28, batch 3700, loss[loss=0.187, simple_loss=0.2582, pruned_loss=0.05789, over 4887.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2396, pruned_loss=0.04643, over 956123.91 frames. ], batch size: 32, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:58:19,869 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.537e+02 1.915e+02 2.211e+02 4.327e+02, threshold=3.830e+02, percent-clipped=1.0 +2023-04-28 02:58:25,401 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158384.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:58:35,213 INFO [finetune.py:976] (0/7) Epoch 28, batch 3750, loss[loss=0.1581, simple_loss=0.2296, pruned_loss=0.04331, over 4912.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2411, pruned_loss=0.04679, over 953695.26 frames. ], batch size: 42, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:58:55,264 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2098, 1.6942, 2.0811, 2.2552, 2.0690, 1.6886, 1.0834, 1.7698], + device='cuda:0'), covar=tensor([0.3229, 0.2992, 0.1702, 0.2042, 0.2441, 0.2567, 0.4131, 0.1991], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0245, 0.0227, 0.0314, 0.0221, 0.0235, 0.0228, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 02:58:58,213 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.67 vs. 
limit=5.0 +2023-04-28 02:58:59,409 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7254, 1.3080, 1.8321, 2.2743, 1.8443, 1.6950, 1.7503, 1.6937], + device='cuda:0'), covar=tensor([0.4357, 0.6702, 0.5763, 0.5049, 0.5704, 0.7690, 0.7663, 0.9370], + device='cuda:0'), in_proj_covar=tensor([0.0447, 0.0426, 0.0522, 0.0509, 0.0474, 0.0511, 0.0513, 0.0528], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 02:59:05,516 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158445.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 02:59:07,667 INFO [finetune.py:976] (0/7) Epoch 28, batch 3800, loss[loss=0.1855, simple_loss=0.2618, pruned_loss=0.05462, over 4920.00 frames. ], tot_loss[loss=0.1689, simple_loss=0.2426, pruned_loss=0.04754, over 954092.64 frames. ], batch size: 38, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 02:59:42,092 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.322e+01 1.536e+02 1.803e+02 2.140e+02 3.917e+02, threshold=3.606e+02, percent-clipped=1.0 +2023-04-28 02:59:43,416 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158478.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:00:06,919 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7224, 1.6068, 1.8273, 2.0618, 2.0677, 1.6917, 1.4190, 1.8051], + device='cuda:0'), covar=tensor([0.0895, 0.1279, 0.0843, 0.0607, 0.0658, 0.0908, 0.0782, 0.0656], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0203, 0.0185, 0.0170, 0.0178, 0.0177, 0.0150, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 03:00:07,898 INFO [finetune.py:976] (0/7) Epoch 28, batch 3850, loss[loss=0.1546, simple_loss=0.2303, pruned_loss=0.0395, over 4757.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2417, pruned_loss=0.04689, over 955642.59 frames. ], batch size: 28, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:00:26,839 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158509.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:01:10,982 INFO [finetune.py:976] (0/7) Epoch 28, batch 3900, loss[loss=0.173, simple_loss=0.2415, pruned_loss=0.05224, over 4817.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2386, pruned_loss=0.04604, over 955459.79 frames. ], batch size: 30, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:01:28,062 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=158557.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:01:51,818 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.216e+01 1.537e+02 1.820e+02 2.187e+02 3.711e+02, threshold=3.639e+02, percent-clipped=1.0 +2023-04-28 03:02:22,650 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5284, 3.2358, 0.9258, 1.8656, 1.9333, 2.4761, 1.8587, 1.0800], + device='cuda:0'), covar=tensor([0.1255, 0.0868, 0.1842, 0.1168, 0.0961, 0.0808, 0.1384, 0.1854], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0239, 0.0136, 0.0121, 0.0132, 0.0153, 0.0117, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 03:02:23,132 INFO [finetune.py:976] (0/7) Epoch 28, batch 3950, loss[loss=0.1853, simple_loss=0.2587, pruned_loss=0.05593, over 4820.00 frames. 
], tot_loss[loss=0.1634, simple_loss=0.2358, pruned_loss=0.04552, over 956253.81 frames. ], batch size: 47, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:03:31,309 INFO [finetune.py:976] (0/7) Epoch 28, batch 4000, loss[loss=0.138, simple_loss=0.21, pruned_loss=0.03302, over 4728.00 frames. ], tot_loss[loss=0.1638, simple_loss=0.236, pruned_loss=0.04579, over 957020.66 frames. ], batch size: 23, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:04:13,586 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.149e+01 1.505e+02 1.815e+02 2.168e+02 4.885e+02, threshold=3.630e+02, percent-clipped=2.0 +2023-04-28 03:04:38,150 INFO [finetune.py:976] (0/7) Epoch 28, batch 4050, loss[loss=0.1758, simple_loss=0.2558, pruned_loss=0.04792, over 4862.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2391, pruned_loss=0.04629, over 955509.00 frames. ], batch size: 31, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:04:40,007 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6310, 1.0191, 1.6561, 2.0856, 1.6938, 1.6063, 1.6324, 1.6165], + device='cuda:0'), covar=tensor([0.4198, 0.6502, 0.5615, 0.5186, 0.5304, 0.7155, 0.7761, 0.8291], + device='cuda:0'), in_proj_covar=tensor([0.0445, 0.0424, 0.0521, 0.0509, 0.0474, 0.0510, 0.0511, 0.0527], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 03:05:33,911 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=158740.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:05:43,860 INFO [finetune.py:976] (0/7) Epoch 28, batch 4100, loss[loss=0.1634, simple_loss=0.237, pruned_loss=0.04485, over 4903.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2421, pruned_loss=0.04741, over 956055.67 frames. ], batch size: 37, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:06:25,662 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.124e+02 1.629e+02 1.870e+02 2.384e+02 6.257e+02, threshold=3.741e+02, percent-clipped=1.0 +2023-04-28 03:06:26,395 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158777.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:06:26,983 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=158778.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:06:35,233 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158783.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:06:38,317 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1531, 2.6395, 2.3237, 2.5438, 1.9610, 2.3499, 2.2567, 1.8921], + device='cuda:0'), covar=tensor([0.1544, 0.0897, 0.0745, 0.0943, 0.3059, 0.1090, 0.1659, 0.2277], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0299, 0.0216, 0.0273, 0.0311, 0.0251, 0.0245, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1150e-04, 1.1774e-04, 8.5028e-05, 1.0747e-04, 1.2538e-04, 9.8519e-05, + 9.8603e-05, 1.0245e-04], device='cuda:0') +2023-04-28 03:06:49,753 INFO [finetune.py:976] (0/7) Epoch 28, batch 4150, loss[loss=0.1794, simple_loss=0.2584, pruned_loss=0.05022, over 4748.00 frames. ], tot_loss[loss=0.168, simple_loss=0.242, pruned_loss=0.04706, over 954832.78 frames. 
], batch size: 26, lr: 2.87e-03, grad_scale: 64.0 +2023-04-28 03:07:10,879 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7555, 2.1755, 1.8701, 1.6214, 1.2324, 1.3178, 2.0235, 1.2048], + device='cuda:0'), covar=tensor([0.1839, 0.1472, 0.1394, 0.1683, 0.2388, 0.1977, 0.0917, 0.2156], + device='cuda:0'), in_proj_covar=tensor([0.0201, 0.0213, 0.0173, 0.0208, 0.0203, 0.0189, 0.0159, 0.0191], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 03:07:30,857 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=158826.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:07:41,996 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=158835.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 03:07:43,893 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158838.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:07:52,630 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158844.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:07:54,975 INFO [finetune.py:976] (0/7) Epoch 28, batch 4200, loss[loss=0.1396, simple_loss=0.2115, pruned_loss=0.0339, over 4734.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2427, pruned_loss=0.04678, over 955143.02 frames. ], batch size: 54, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:08:23,825 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.1456, 4.2069, 3.0444, 4.8326, 4.2006, 4.1520, 1.8424, 4.1103], + device='cuda:0'), covar=tensor([0.1780, 0.1089, 0.3705, 0.1035, 0.2459, 0.1747, 0.5933, 0.2129], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0220, 0.0252, 0.0304, 0.0300, 0.0251, 0.0277, 0.0274], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 03:08:26,941 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.65 vs. limit=2.0 +2023-04-28 03:08:37,326 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.042e+02 1.525e+02 1.688e+02 1.998e+02 3.338e+02, threshold=3.377e+02, percent-clipped=0.0 +2023-04-28 03:09:04,985 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=158896.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 03:09:05,557 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1953, 1.4041, 1.2735, 1.6645, 1.5742, 1.4945, 1.3639, 2.4645], + device='cuda:0'), covar=tensor([0.0603, 0.0803, 0.0829, 0.1234, 0.0618, 0.0499, 0.0730, 0.0225], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0014, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 03:09:06,085 INFO [finetune.py:976] (0/7) Epoch 28, batch 4250, loss[loss=0.1736, simple_loss=0.2444, pruned_loss=0.05135, over 4819.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.24, pruned_loss=0.04636, over 954042.50 frames. 
], batch size: 33, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:10:01,896 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2888, 2.2555, 1.9011, 1.8988, 2.4618, 1.9362, 2.9047, 1.7628], + device='cuda:0'), covar=tensor([0.3841, 0.2115, 0.4381, 0.3560, 0.1767, 0.2717, 0.1342, 0.4165], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0356, 0.0423, 0.0352, 0.0384, 0.0378, 0.0373, 0.0424], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 03:10:12,868 INFO [finetune.py:976] (0/7) Epoch 28, batch 4300, loss[loss=0.1639, simple_loss=0.2345, pruned_loss=0.0466, over 4926.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2368, pruned_loss=0.04533, over 956102.29 frames. ], batch size: 37, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:10:14,235 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.71 vs. limit=2.0 +2023-04-28 03:10:48,762 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.467e+01 1.425e+02 1.689e+02 1.969e+02 4.397e+02, threshold=3.377e+02, percent-clipped=1.0 +2023-04-28 03:11:00,237 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.43 vs. limit=2.0 +2023-04-28 03:11:18,967 INFO [finetune.py:976] (0/7) Epoch 28, batch 4350, loss[loss=0.1464, simple_loss=0.2184, pruned_loss=0.0372, over 4817.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.234, pruned_loss=0.04469, over 956911.27 frames. ], batch size: 40, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:11:28,796 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5089, 2.3141, 2.5874, 2.9925, 2.7995, 2.6453, 2.1912, 2.6676], + device='cuda:0'), covar=tensor([0.0832, 0.1090, 0.0638, 0.0635, 0.0681, 0.0752, 0.0683, 0.0552], + device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0202, 0.0184, 0.0169, 0.0178, 0.0177, 0.0150, 0.0175], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 03:12:15,670 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159040.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:12:26,355 INFO [finetune.py:976] (0/7) Epoch 28, batch 4400, loss[loss=0.1667, simple_loss=0.2415, pruned_loss=0.04594, over 4831.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2361, pruned_loss=0.04591, over 957726.47 frames. ], batch size: 47, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:12:54,424 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159069.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:13:05,086 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.784e+01 1.599e+02 1.852e+02 2.200e+02 4.625e+02, threshold=3.703e+02, percent-clipped=1.0 +2023-04-28 03:13:19,032 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=159088.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:13:29,730 INFO [finetune.py:976] (0/7) Epoch 28, batch 4450, loss[loss=0.1419, simple_loss=0.2192, pruned_loss=0.03227, over 4737.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2401, pruned_loss=0.04675, over 958308.18 frames. 
], batch size: 27, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:14:09,949 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2925, 1.5149, 1.3673, 1.6992, 1.6384, 1.9208, 1.4267, 3.3289], + device='cuda:0'), covar=tensor([0.0604, 0.0823, 0.0791, 0.1225, 0.0643, 0.0464, 0.0740, 0.0158], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0014, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 03:14:09,978 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159130.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:14:12,205 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159133.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:14:21,332 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159139.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:14:38,438 INFO [finetune.py:976] (0/7) Epoch 28, batch 4500, loss[loss=0.1942, simple_loss=0.2645, pruned_loss=0.06196, over 4921.00 frames. ], tot_loss[loss=0.1677, simple_loss=0.2412, pruned_loss=0.04716, over 957334.96 frames. ], batch size: 36, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:14:49,776 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159159.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:15:11,106 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.154e+02 1.553e+02 1.824e+02 2.169e+02 4.146e+02, threshold=3.648e+02, percent-clipped=1.0 +2023-04-28 03:15:11,231 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9346, 1.6788, 1.8944, 2.2774, 2.3431, 1.9097, 1.7242, 2.0343], + device='cuda:0'), covar=tensor([0.0812, 0.1269, 0.0762, 0.0568, 0.0638, 0.0769, 0.0695, 0.0537], + device='cuda:0'), in_proj_covar=tensor([0.0182, 0.0202, 0.0183, 0.0169, 0.0177, 0.0176, 0.0149, 0.0174], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 03:15:31,026 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159191.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 03:15:41,718 INFO [finetune.py:976] (0/7) Epoch 28, batch 4550, loss[loss=0.1742, simple_loss=0.2484, pruned_loss=0.05, over 4744.00 frames. ], tot_loss[loss=0.1684, simple_loss=0.2425, pruned_loss=0.04715, over 956654.78 frames. ], batch size: 59, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:16:05,597 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159220.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:16:45,690 INFO [finetune.py:976] (0/7) Epoch 28, batch 4600, loss[loss=0.2245, simple_loss=0.2893, pruned_loss=0.07978, over 4256.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.242, pruned_loss=0.04711, over 953185.81 frames. ], batch size: 66, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:17:18,839 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.827e+01 1.517e+02 1.769e+02 2.322e+02 3.935e+02, threshold=3.539e+02, percent-clipped=1.0 +2023-04-28 03:17:38,410 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159290.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:17:49,022 INFO [finetune.py:976] (0/7) Epoch 28, batch 4650, loss[loss=0.1711, simple_loss=0.2338, pruned_loss=0.05416, over 4906.00 frames. 
], tot_loss[loss=0.1671, simple_loss=0.2402, pruned_loss=0.04703, over 954665.11 frames. ], batch size: 36, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:18:54,507 INFO [finetune.py:976] (0/7) Epoch 28, batch 4700, loss[loss=0.1452, simple_loss=0.2174, pruned_loss=0.03651, over 4182.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2383, pruned_loss=0.04673, over 956014.18 frames. ], batch size: 18, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:19:03,236 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159351.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:19:35,875 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.49 vs. limit=5.0 +2023-04-28 03:19:36,243 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.039e+02 1.471e+02 1.707e+02 1.992e+02 4.348e+02, threshold=3.414e+02, percent-clipped=1.0 +2023-04-28 03:20:00,611 INFO [finetune.py:976] (0/7) Epoch 28, batch 4750, loss[loss=0.1749, simple_loss=0.248, pruned_loss=0.05087, over 4925.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2365, pruned_loss=0.04641, over 956137.87 frames. ], batch size: 38, lr: 2.87e-03, grad_scale: 32.0 +2023-04-28 03:20:41,276 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159425.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:20:46,244 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159433.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:20:55,204 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159439.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:21:03,941 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-28 03:21:06,688 INFO [finetune.py:976] (0/7) Epoch 28, batch 4800, loss[loss=0.2182, simple_loss=0.3035, pruned_loss=0.0664, over 4808.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2383, pruned_loss=0.04651, over 957241.76 frames. ], batch size: 45, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:21:48,625 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.679e+02 2.036e+02 2.333e+02 4.105e+02, threshold=4.072e+02, percent-clipped=2.0 +2023-04-28 03:21:48,752 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3729, 3.0246, 2.6096, 2.8531, 2.2371, 2.5129, 2.6639, 2.1680], + device='cuda:0'), covar=tensor([0.2103, 0.1222, 0.0663, 0.1139, 0.2953, 0.1166, 0.1938, 0.2509], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0299, 0.0216, 0.0273, 0.0311, 0.0251, 0.0245, 0.0261], + device='cuda:0'), out_proj_covar=tensor([1.1169e-04, 1.1765e-04, 8.4930e-05, 1.0742e-04, 1.2560e-04, 9.8552e-05, + 9.8794e-05, 1.0269e-04], device='cuda:0') +2023-04-28 03:21:51,196 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=159481.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:21:59,475 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=159487.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:22:01,992 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159491.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 03:22:12,296 INFO [finetune.py:976] (0/7) Epoch 28, batch 4850, loss[loss=0.1686, simple_loss=0.2578, pruned_loss=0.03977, over 4847.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2405, pruned_loss=0.04649, over 956144.89 frames. 
], batch size: 44, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:22:31,647 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.96 vs. limit=5.0 +2023-04-28 03:22:32,245 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.11 vs. limit=2.0 +2023-04-28 03:22:40,600 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159515.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:23:05,839 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=159539.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 03:23:16,525 INFO [finetune.py:976] (0/7) Epoch 28, batch 4900, loss[loss=0.1825, simple_loss=0.2772, pruned_loss=0.04394, over 4708.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2423, pruned_loss=0.04739, over 955236.36 frames. ], batch size: 54, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:23:56,948 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.158e+02 1.586e+02 1.930e+02 2.270e+02 5.838e+02, threshold=3.860e+02, percent-clipped=1.0 +2023-04-28 03:24:18,510 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5655, 1.9046, 2.0692, 2.1473, 2.0225, 2.0476, 2.0859, 2.0774], + device='cuda:0'), covar=tensor([0.4137, 0.5364, 0.4342, 0.4496, 0.5661, 0.6969, 0.5311, 0.4428], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0378, 0.0331, 0.0344, 0.0354, 0.0394, 0.0364, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 03:24:19,084 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9504, 2.3182, 0.9500, 1.2409, 1.7067, 1.1452, 2.5310, 1.4166], + device='cuda:0'), covar=tensor([0.0714, 0.0612, 0.0714, 0.1289, 0.0451, 0.1078, 0.0399, 0.0708], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 03:24:20,243 INFO [finetune.py:976] (0/7) Epoch 28, batch 4950, loss[loss=0.1509, simple_loss=0.2274, pruned_loss=0.03722, over 4918.00 frames. ], tot_loss[loss=0.169, simple_loss=0.243, pruned_loss=0.04751, over 955566.77 frames. ], batch size: 33, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:24:26,679 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9195, 2.3359, 1.0457, 1.2158, 1.7484, 1.1763, 2.5366, 1.4180], + device='cuda:0'), covar=tensor([0.0743, 0.0587, 0.0689, 0.1343, 0.0441, 0.1067, 0.0471, 0.0702], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 03:25:08,549 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159631.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:25:23,006 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159646.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:25:24,154 INFO [finetune.py:976] (0/7) Epoch 28, batch 5000, loss[loss=0.1796, simple_loss=0.2395, pruned_loss=0.05988, over 4819.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2416, pruned_loss=0.04719, over 953740.63 frames. 
], batch size: 25, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:26:05,240 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.105e+02 1.569e+02 1.814e+02 2.278e+02 4.708e+02, threshold=3.628e+02, percent-clipped=1.0 +2023-04-28 03:26:24,426 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159692.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:26:32,514 INFO [finetune.py:976] (0/7) Epoch 28, batch 5050, loss[loss=0.1422, simple_loss=0.2111, pruned_loss=0.03667, over 4930.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2392, pruned_loss=0.04647, over 954323.25 frames. ], batch size: 33, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:27:06,771 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159725.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:27:30,167 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8046, 2.8807, 2.3453, 2.6117, 3.0125, 2.6955, 3.8277, 2.2592], + device='cuda:0'), covar=tensor([0.3537, 0.2096, 0.3992, 0.3008, 0.1732, 0.2241, 0.1120, 0.3803], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0351, 0.0419, 0.0349, 0.0379, 0.0373, 0.0369, 0.0420], + device='cuda:0'), out_proj_covar=tensor([9.9146e-05, 1.0465e-04, 1.2674e-04, 1.0433e-04, 1.1220e-04, 1.1055e-04, + 1.0769e-04, 1.2633e-04], device='cuda:0') +2023-04-28 03:27:35,600 INFO [finetune.py:976] (0/7) Epoch 28, batch 5100, loss[loss=0.1271, simple_loss=0.1929, pruned_loss=0.0307, over 4690.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2355, pruned_loss=0.0451, over 954231.51 frames. ], batch size: 23, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:27:46,796 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6924, 1.7789, 1.7574, 1.3988, 1.7615, 1.4656, 2.1771, 1.4891], + device='cuda:0'), covar=tensor([0.4464, 0.2092, 0.5325, 0.3151, 0.1817, 0.2601, 0.1727, 0.5436], + device='cuda:0'), in_proj_covar=tensor([0.0336, 0.0351, 0.0419, 0.0349, 0.0379, 0.0373, 0.0369, 0.0420], + device='cuda:0'), out_proj_covar=tensor([9.9127e-05, 1.0464e-04, 1.2673e-04, 1.0434e-04, 1.1219e-04, 1.1060e-04, + 1.0771e-04, 1.2633e-04], device='cuda:0') +2023-04-28 03:28:09,659 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=159773.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:28:13,002 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.052e+01 1.474e+02 1.729e+02 2.168e+02 4.974e+02, threshold=3.458e+02, percent-clipped=1.0 +2023-04-28 03:28:34,339 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1383, 0.8723, 0.9109, 0.8708, 1.2561, 1.0383, 0.9697, 0.9400], + device='cuda:0'), covar=tensor([0.2074, 0.1884, 0.2471, 0.1615, 0.1250, 0.1774, 0.1944, 0.2771], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0305, 0.0346, 0.0285, 0.0322, 0.0303, 0.0297, 0.0371], + device='cuda:0'), out_proj_covar=tensor([6.3748e-05, 6.2653e-05, 7.2482e-05, 5.6984e-05, 6.5642e-05, 6.2934e-05, + 6.1261e-05, 7.8353e-05], device='cuda:0') +2023-04-28 03:28:41,788 INFO [finetune.py:976] (0/7) Epoch 28, batch 5150, loss[loss=0.1542, simple_loss=0.2372, pruned_loss=0.03556, over 4927.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.235, pruned_loss=0.04511, over 954504.31 frames. 
], batch size: 38, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:29:03,545 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159815.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:29:36,089 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9613, 3.8536, 2.8997, 4.5440, 3.9751, 3.9277, 1.6091, 3.9577], + device='cuda:0'), covar=tensor([0.1620, 0.1240, 0.3264, 0.1389, 0.2626, 0.1712, 0.6293, 0.2184], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0222, 0.0253, 0.0306, 0.0302, 0.0252, 0.0278, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 03:29:45,762 INFO [finetune.py:976] (0/7) Epoch 28, batch 5200, loss[loss=0.1615, simple_loss=0.2422, pruned_loss=0.0404, over 4829.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2397, pruned_loss=0.04646, over 955731.34 frames. ], batch size: 30, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:29:54,899 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1657, 2.1828, 2.0888, 1.9066, 2.3893, 2.0224, 2.8495, 1.7744], + device='cuda:0'), covar=tensor([0.3241, 0.1690, 0.3518, 0.2445, 0.1429, 0.2123, 0.1252, 0.3983], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0354, 0.0421, 0.0351, 0.0382, 0.0376, 0.0371, 0.0423], + device='cuda:0'), out_proj_covar=tensor([9.9881e-05, 1.0527e-04, 1.2747e-04, 1.0507e-04, 1.1296e-04, 1.1146e-04, + 1.0846e-04, 1.2704e-04], device='cuda:0') +2023-04-28 03:29:55,488 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159855.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:30:05,366 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=159863.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:30:26,728 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.215e+02 1.682e+02 1.955e+02 2.433e+02 4.346e+02, threshold=3.909e+02, percent-clipped=3.0 +2023-04-28 03:30:51,955 INFO [finetune.py:976] (0/7) Epoch 28, batch 5250, loss[loss=0.1328, simple_loss=0.2082, pruned_loss=0.0287, over 4783.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2413, pruned_loss=0.04671, over 956614.67 frames. ], batch size: 29, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:31:09,952 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=159910.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:31:13,623 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159916.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:31:55,417 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=159946.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:32:02,069 INFO [finetune.py:976] (0/7) Epoch 28, batch 5300, loss[loss=0.1775, simple_loss=0.2522, pruned_loss=0.05143, over 4817.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2413, pruned_loss=0.04666, over 955185.94 frames. 
], batch size: 33, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:32:26,779 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=159971.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:32:35,449 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.404e+01 1.544e+02 1.829e+02 2.287e+02 5.054e+02, threshold=3.659e+02, percent-clipped=1.0 +2023-04-28 03:32:35,600 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7087, 2.0712, 1.7745, 1.4418, 1.2826, 1.3120, 1.7859, 1.2466], + device='cuda:0'), covar=tensor([0.1876, 0.1309, 0.1497, 0.1851, 0.2371, 0.2047, 0.1007, 0.2161], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0210, 0.0171, 0.0206, 0.0202, 0.0187, 0.0157, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 03:32:46,384 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.2193, 3.2895, 2.5472, 3.7925, 3.2589, 3.2680, 1.4625, 3.2189], + device='cuda:0'), covar=tensor([0.2073, 0.1371, 0.3433, 0.2479, 0.3053, 0.1953, 0.5726, 0.2678], + device='cuda:0'), in_proj_covar=tensor([0.0247, 0.0222, 0.0253, 0.0307, 0.0302, 0.0252, 0.0278, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 03:32:46,452 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0888, 1.5884, 1.9821, 2.0811, 1.9538, 1.5425, 1.1137, 1.6492], + device='cuda:0'), covar=tensor([0.2817, 0.2863, 0.1560, 0.2048, 0.2492, 0.2392, 0.4272, 0.1920], + device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0245, 0.0227, 0.0313, 0.0222, 0.0234, 0.0228, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 03:32:48,724 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=159987.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:32:58,788 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=159994.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:33:06,690 INFO [finetune.py:976] (0/7) Epoch 28, batch 5350, loss[loss=0.1669, simple_loss=0.2248, pruned_loss=0.05453, over 4826.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.242, pruned_loss=0.04678, over 954596.29 frames. ], batch size: 30, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:33:08,036 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-160000.pt +2023-04-28 03:34:13,400 INFO [finetune.py:976] (0/7) Epoch 28, batch 5400, loss[loss=0.1482, simple_loss=0.2202, pruned_loss=0.03814, over 4758.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2393, pruned_loss=0.04625, over 955068.83 frames. ], batch size: 28, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:34:46,553 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.246e+02 1.579e+02 1.912e+02 2.220e+02 4.270e+02, threshold=3.825e+02, percent-clipped=1.0 +2023-04-28 03:35:18,036 INFO [finetune.py:976] (0/7) Epoch 28, batch 5450, loss[loss=0.1369, simple_loss=0.2048, pruned_loss=0.03449, over 4742.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2362, pruned_loss=0.04518, over 952997.80 frames. 
], batch size: 27, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:36:12,219 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8486, 1.6556, 1.9597, 2.2727, 2.3371, 1.8284, 1.5691, 2.0993], + device='cuda:0'), covar=tensor([0.0795, 0.1166, 0.0686, 0.0548, 0.0494, 0.0817, 0.0719, 0.0501], + device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0204, 0.0183, 0.0171, 0.0178, 0.0177, 0.0150, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 03:36:22,771 INFO [finetune.py:976] (0/7) Epoch 28, batch 5500, loss[loss=0.1512, simple_loss=0.2233, pruned_loss=0.03959, over 4751.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2328, pruned_loss=0.04392, over 951521.44 frames. ], batch size: 54, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:37:00,968 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 7.993e+01 1.507e+02 1.787e+02 2.170e+02 4.329e+02, threshold=3.573e+02, percent-clipped=2.0 +2023-04-28 03:37:27,651 INFO [finetune.py:976] (0/7) Epoch 28, batch 5550, loss[loss=0.1272, simple_loss=0.2078, pruned_loss=0.02334, over 4795.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2356, pruned_loss=0.04514, over 950121.89 frames. ], batch size: 25, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:37:41,104 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160211.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:38:03,248 INFO [finetune.py:976] (0/7) Epoch 28, batch 5600, loss[loss=0.208, simple_loss=0.2921, pruned_loss=0.06197, over 4905.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2389, pruned_loss=0.04603, over 950086.39 frames. ], batch size: 36, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:38:13,711 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160266.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:38:17,328 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3141, 2.2525, 2.1936, 2.0233, 2.4156, 2.0161, 3.0795, 1.8690], + device='cuda:0'), covar=tensor([0.2908, 0.1599, 0.3021, 0.2485, 0.1375, 0.2214, 0.1082, 0.3497], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0355, 0.0422, 0.0354, 0.0384, 0.0378, 0.0372, 0.0426], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 03:38:18,006 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-28 03:38:20,147 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.023e+02 1.566e+02 2.005e+02 2.303e+02 3.818e+02, threshold=4.010e+02, percent-clipped=2.0 +2023-04-28 03:38:26,531 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160287.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:38:32,860 INFO [finetune.py:976] (0/7) Epoch 28, batch 5650, loss[loss=0.178, simple_loss=0.2617, pruned_loss=0.04718, over 4903.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2418, pruned_loss=0.04609, over 951472.82 frames. 
], batch size: 43, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:38:38,247 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160307.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 03:38:40,007 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0090, 1.9584, 1.7479, 1.6217, 2.1011, 1.8278, 2.5729, 1.5182], + device='cuda:0'), covar=tensor([0.3502, 0.1934, 0.4522, 0.3006, 0.1468, 0.2149, 0.1253, 0.4596], + device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0354, 0.0421, 0.0353, 0.0383, 0.0377, 0.0371, 0.0425], + device='cuda:0'), out_proj_covar=tensor([9.9739e-05, 1.0538e-04, 1.2730e-04, 1.0543e-04, 1.1343e-04, 1.1171e-04, + 1.0836e-04, 1.2765e-04], device='cuda:0') +2023-04-28 03:38:41,148 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4805, 1.3645, 0.5379, 1.2237, 1.4658, 1.3601, 1.2648, 1.3090], + device='cuda:0'), covar=tensor([0.0526, 0.0394, 0.0397, 0.0578, 0.0290, 0.0521, 0.0518, 0.0593], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 03:38:53,255 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8571, 1.6222, 4.4285, 4.1624, 3.8530, 4.1403, 4.1848, 3.9169], + device='cuda:0'), covar=tensor([0.6668, 0.5433, 0.1082, 0.1870, 0.1202, 0.1877, 0.1137, 0.1583], + device='cuda:0'), in_proj_covar=tensor([0.0310, 0.0310, 0.0406, 0.0408, 0.0351, 0.0417, 0.0320, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 03:38:54,999 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=160335.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 03:39:02,716 INFO [finetune.py:976] (0/7) Epoch 28, batch 5700, loss[loss=0.1419, simple_loss=0.2093, pruned_loss=0.03723, over 4270.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2382, pruned_loss=0.0457, over 931363.03 frames. ], batch size: 18, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:39:14,597 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160368.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 03:39:18,825 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-28.pt +2023-04-28 03:39:31,533 INFO [finetune.py:976] (0/7) Epoch 29, batch 0, loss[loss=0.1819, simple_loss=0.2468, pruned_loss=0.05854, over 4795.00 frames. ], tot_loss[loss=0.1819, simple_loss=0.2468, pruned_loss=0.05854, over 4795.00 frames. ], batch size: 51, lr: 2.86e-03, grad_scale: 32.0 +2023-04-28 03:39:31,534 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-28 03:39:42,780 INFO [finetune.py:1010] (0/7) Epoch 29, validation: loss=0.1546, simple_loss=0.2236, pruned_loss=0.04278, over 2265189.00 frames. 
+2023-04-28 03:39:42,780 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-28 03:39:44,400 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.451e+01 1.469e+02 1.726e+02 2.023e+02 3.272e+02, threshold=3.452e+02, percent-clipped=0.0
+2023-04-28 03:39:45,694 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7343, 3.5381, 0.8239, 1.9344, 2.1099, 2.5080, 2.1196, 1.0690],
+ device='cuda:0'), covar=tensor([0.1365, 0.0737, 0.2135, 0.1168, 0.0956, 0.1020, 0.1441, 0.1999],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0238, 0.0136, 0.0120, 0.0131, 0.0152, 0.0117, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 03:40:16,113 INFO [finetune.py:976] (0/7) Epoch 29, batch 50, loss[loss=0.181, simple_loss=0.2486, pruned_loss=0.05671, over 4820.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2412, pruned_loss=0.04796, over 216712.58 frames. ], batch size: 30, lr: 2.85e-03, grad_scale: 32.0
+2023-04-28 03:40:21,611 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0
+2023-04-28 03:40:33,637 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.46 vs. limit=5.0
+2023-04-28 03:40:45,248 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.14 vs. limit=2.0
+2023-04-28 03:41:16,375 INFO [finetune.py:976] (0/7) Epoch 29, batch 100, loss[loss=0.1723, simple_loss=0.2483, pruned_loss=0.04816, over 4928.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2371, pruned_loss=0.04655, over 380914.46 frames. ], batch size: 33, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:41:24,293 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.006e+02 1.575e+02 1.942e+02 2.371e+02 3.324e+02, threshold=3.883e+02, percent-clipped=0.0
+2023-04-28 03:42:07,346 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160511.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:42:22,987 INFO [finetune.py:976] (0/7) Epoch 29, batch 150, loss[loss=0.1196, simple_loss=0.1973, pruned_loss=0.02091, over 4818.00 frames. ], tot_loss[loss=0.1601, simple_loss=0.2319, pruned_loss=0.04417, over 505846.02 frames. ], batch size: 38, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:42:43,472 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4451, 1.9856, 2.3468, 2.8165, 2.3065, 1.8519, 1.6240, 2.1681],
+ device='cuda:0'), covar=tensor([0.3222, 0.3016, 0.1581, 0.1994, 0.2659, 0.2615, 0.3866, 0.1948],
+ device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0247, 0.0228, 0.0315, 0.0223, 0.0236, 0.0229, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 03:43:05,315 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=160559.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:43:14,853 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160566.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:43:27,103 INFO [finetune.py:976] (0/7) Epoch 29, batch 200, loss[loss=0.1702, simple_loss=0.2258, pruned_loss=0.05731, over 4038.00 frames. ], tot_loss[loss=0.1587, simple_loss=0.2299, pruned_loss=0.04369, over 605150.26 frames. ], batch size: 65, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:43:34,234 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.002e+02 1.562e+02 1.851e+02 2.230e+02 3.985e+02, threshold=3.702e+02, percent-clipped=1.0
+2023-04-28 03:43:36,798 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9070, 4.2932, 0.6620, 2.4010, 2.4502, 2.7347, 2.4662, 1.0664],
+ device='cuda:0'), covar=tensor([0.1361, 0.1029, 0.2304, 0.1139, 0.0985, 0.1166, 0.1418, 0.2172],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0238, 0.0135, 0.0119, 0.0131, 0.0152, 0.0117, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 03:44:17,857 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=160614.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:44:30,032 INFO [finetune.py:976] (0/7) Epoch 29, batch 250, loss[loss=0.1364, simple_loss=0.2263, pruned_loss=0.02325, over 4863.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2327, pruned_loss=0.04314, over 682772.53 frames. ], batch size: 31, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:45:20,096 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=160663.0, num_to_drop=1, layers_to_drop={3}
+2023-04-28 03:45:32,745 INFO [finetune.py:976] (0/7) Epoch 29, batch 300, loss[loss=0.143, simple_loss=0.2229, pruned_loss=0.03156, over 4771.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2362, pruned_loss=0.04392, over 744201.79 frames. ], batch size: 28, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:45:38,924 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.518e+02 1.888e+02 2.303e+02 4.692e+02, threshold=3.776e+02, percent-clipped=1.0
+2023-04-28 03:46:01,447 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.17 vs. limit=2.0
+2023-04-28 03:46:15,257 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-28 03:46:37,391 INFO [finetune.py:976] (0/7) Epoch 29, batch 350, loss[loss=0.18, simple_loss=0.2272, pruned_loss=0.06635, over 3465.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2387, pruned_loss=0.04492, over 789946.56 frames. ], batch size: 15, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:47:08,176 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7652, 1.5387, 1.7542, 2.1284, 2.1578, 1.7474, 1.5338, 1.9831],
+ device='cuda:0'), covar=tensor([0.0814, 0.1270, 0.0797, 0.0616, 0.0628, 0.0886, 0.0736, 0.0532],
+ device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0205, 0.0184, 0.0171, 0.0178, 0.0177, 0.0151, 0.0176],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 03:47:35,563 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0
+2023-04-28 03:47:45,907 INFO [finetune.py:976] (0/7) Epoch 29, batch 400, loss[loss=0.1696, simple_loss=0.2446, pruned_loss=0.04727, over 4924.00 frames. ], tot_loss[loss=0.1675, simple_loss=0.2429, pruned_loss=0.04606, over 827670.96 frames. ], batch size: 38, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:47:47,780 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.194e+02 1.618e+02 1.920e+02 2.400e+02 5.071e+02, threshold=3.839e+02, percent-clipped=2.0
+2023-04-28 03:48:21,590 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7278, 1.3120, 1.3839, 1.4761, 1.8690, 1.5599, 1.3278, 1.3109],
+ device='cuda:0'), covar=tensor([0.1505, 0.1431, 0.1814, 0.1332, 0.0894, 0.1341, 0.1690, 0.2202],
+ device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0305, 0.0346, 0.0284, 0.0322, 0.0302, 0.0297, 0.0372],
+ device='cuda:0'), out_proj_covar=tensor([6.3550e-05, 6.2497e-05, 7.2385e-05, 5.6854e-05, 6.5639e-05, 6.2742e-05,
+ 6.1197e-05, 7.8507e-05], device='cuda:0')
+2023-04-28 03:48:36,948 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7761, 1.9997, 0.9862, 1.5065, 2.0254, 1.6912, 1.5587, 1.6370],
+ device='cuda:0'), covar=tensor([0.0461, 0.0327, 0.0309, 0.0515, 0.0240, 0.0474, 0.0442, 0.0525],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053],
+ device='cuda:0')
+2023-04-28 03:48:48,103 INFO [finetune.py:976] (0/7) Epoch 29, batch 450, loss[loss=0.1473, simple_loss=0.2286, pruned_loss=0.03294, over 4774.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2424, pruned_loss=0.04644, over 855276.82 frames. ], batch size: 26, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:49:19,853 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.8961, 3.8411, 2.7181, 4.5204, 3.9153, 3.9321, 1.5632, 3.8436],
+ device='cuda:0'), covar=tensor([0.1613, 0.1182, 0.3397, 0.1269, 0.2433, 0.1604, 0.6199, 0.2431],
+ device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0223, 0.0254, 0.0308, 0.0303, 0.0254, 0.0280, 0.0278],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 03:49:51,964 INFO [finetune.py:976] (0/7) Epoch 29, batch 500, loss[loss=0.1871, simple_loss=0.246, pruned_loss=0.06413, over 4911.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2406, pruned_loss=0.04602, over 879277.37 frames. ], batch size: 46, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:49:53,843 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.077e+02 1.560e+02 1.871e+02 2.250e+02 5.218e+02, threshold=3.742e+02, percent-clipped=1.0
+2023-04-28 03:50:02,717 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0
+2023-04-28 03:50:25,300 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160901.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:50:56,030 INFO [finetune.py:976] (0/7) Epoch 29, batch 550, loss[loss=0.1502, simple_loss=0.2233, pruned_loss=0.03855, over 4902.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2366, pruned_loss=0.04529, over 897815.09 frames. ], batch size: 43, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:51:43,446 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=160962.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:51:49,187 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=160963.0, num_to_drop=1, layers_to_drop={1}
+2023-04-28 03:51:50,431 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=160965.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:51:51,634 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3470, 1.3094, 1.6753, 1.6331, 1.2258, 1.1842, 1.3090, 0.8821],
+ device='cuda:0'), covar=tensor([0.0539, 0.0589, 0.0366, 0.0682, 0.0744, 0.1003, 0.0603, 0.0535],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0066, 0.0065, 0.0068, 0.0074, 0.0093, 0.0072, 0.0062],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 03:52:01,358 INFO [finetune.py:976] (0/7) Epoch 29, batch 600, loss[loss=0.1627, simple_loss=0.2501, pruned_loss=0.03768, over 4835.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2363, pruned_loss=0.04544, over 909821.11 frames. ], batch size: 47, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:52:03,142 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.581e+02 1.979e+02 2.385e+02 4.353e+02, threshold=3.958e+02, percent-clipped=2.0
+2023-04-28 03:52:14,286 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0
+2023-04-28 03:52:35,722 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=161011.0, num_to_drop=1, layers_to_drop={0}
+2023-04-28 03:52:44,116 INFO [finetune.py:976] (0/7) Epoch 29, batch 650, loss[loss=0.2225, simple_loss=0.2819, pruned_loss=0.08159, over 4826.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2402, pruned_loss=0.04632, over 919134.22 frames. ], batch size: 39, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:52:44,863 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161026.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:52:58,274 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.87 vs. limit=5.0
+2023-04-28 03:53:08,776 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3823, 1.6534, 1.8295, 1.9481, 1.8287, 1.8920, 1.9207, 1.9255],
+ device='cuda:0'), covar=tensor([0.3464, 0.4868, 0.4261, 0.4200, 0.5038, 0.6528, 0.4749, 0.4207],
+ device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0376, 0.0332, 0.0344, 0.0352, 0.0396, 0.0365, 0.0336],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 03:53:17,105 INFO [finetune.py:976] (0/7) Epoch 29, batch 700, loss[loss=0.2154, simple_loss=0.2804, pruned_loss=0.07526, over 4886.00 frames. ], tot_loss[loss=0.1676, simple_loss=0.2415, pruned_loss=0.04685, over 926890.78 frames. ], batch size: 32, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:53:18,919 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.470e+01 1.599e+02 1.884e+02 2.263e+02 4.345e+02, threshold=3.768e+02, percent-clipped=2.0
+2023-04-28 03:53:41,704 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0
+2023-04-28 03:53:50,558 INFO [finetune.py:976] (0/7) Epoch 29, batch 750, loss[loss=0.2012, simple_loss=0.2722, pruned_loss=0.06511, over 4901.00 frames. ], tot_loss[loss=0.1692, simple_loss=0.2433, pruned_loss=0.04754, over 933688.37 frames. ], batch size: 37, lr: 2.85e-03, grad_scale: 16.0
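
The recurring [zipformer.py:1188] entries record the Zipformer encoder's stochastic layer skipping: per batch, each encoder stack chooses how many of its layers to bypass (num_to_drop) and which ones (layers_to_drop), dropping aggressively inside its warmup window [warmup_begin, warmup_end) and only rarely afterwards, which is why at batch_count near 161,000 the set is usually empty. The schedule below is an assumed one chosen to match that behavior; the probabilities and the annealing are illustrative, not the icefall values.

    import random

    def pick_layers_to_drop(num_layers: int, batch_count: float,
                            warmup_begin: float, warmup_end: float,
                            warmup_rate: float = 0.5,   # assumed drop rate during warmup
                            final_rate: float = 0.05):  # assumed drop rate afterwards
        """Return the set of encoder-layer indices to bypass for one batch."""
        if batch_count < warmup_begin:
            rate = warmup_rate
        elif batch_count < warmup_end:
            # anneal linearly from warmup_rate down to final_rate
            frac = (batch_count - warmup_begin) / (warmup_end - warmup_begin)
            rate = warmup_rate + frac * (final_rate - warmup_rate)
        else:
            rate = final_rate
        return {i for i in range(num_layers) if random.random() < rate}

For example, pick_layers_to_drop(4, 160963.0, 2000.0, 2666.7) returns an empty set most of the time and occasionally a singleton such as {1}, matching the entries above.
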
+2023-04-28 03:54:24,408 INFO [finetune.py:976] (0/7) Epoch 29, batch 800, loss[loss=0.1639, simple_loss=0.2512, pruned_loss=0.03831, over 4815.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2424, pruned_loss=0.04674, over 937647.98 frames. ], batch size: 33, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:54:25,685 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.4454, 3.4298, 2.5284, 4.0437, 3.4036, 3.4796, 1.4235, 3.3691],
+ device='cuda:0'), covar=tensor([0.1930, 0.1406, 0.3683, 0.1953, 0.3023, 0.1967, 0.6161, 0.2689],
+ device='cuda:0'), in_proj_covar=tensor([0.0249, 0.0222, 0.0253, 0.0307, 0.0302, 0.0253, 0.0280, 0.0276],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 03:54:26,204 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.992e+01 1.674e+02 1.972e+02 2.388e+02 4.465e+02, threshold=3.944e+02, percent-clipped=4.0
+2023-04-28 03:54:52,574 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161216.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:54:53,802 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0594, 2.4834, 1.0705, 1.2787, 1.8853, 1.1384, 3.1756, 1.5597],
+ device='cuda:0'), covar=tensor([0.0666, 0.0639, 0.0789, 0.1146, 0.0466, 0.0980, 0.0214, 0.0609],
+ device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0065, 0.0048, 0.0046, 0.0049, 0.0051, 0.0073, 0.0051],
+ device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008],
+ device='cuda:0')
+2023-04-28 03:54:54,447 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8112, 1.3025, 1.8647, 2.2378, 1.9156, 1.7823, 1.8306, 1.7575],
+ device='cuda:0'), covar=tensor([0.4146, 0.6587, 0.5542, 0.5452, 0.5258, 0.7167, 0.7169, 0.8215],
+ device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0427, 0.0523, 0.0513, 0.0476, 0.0513, 0.0516, 0.0530],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 03:54:57,898 INFO [finetune.py:976] (0/7) Epoch 29, batch 850, loss[loss=0.1166, simple_loss=0.1904, pruned_loss=0.02142, over 4721.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2393, pruned_loss=0.04578, over 941342.17 frames. ], batch size: 59, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:55:01,667 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161231.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:55:29,542 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161257.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:55:30,705 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6869, 2.0854, 1.9204, 2.0093, 1.6783, 1.7750, 1.6663, 1.4995],
+ device='cuda:0'), covar=tensor([0.1851, 0.1209, 0.0751, 0.1165, 0.3350, 0.1108, 0.1888, 0.2408],
+ device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0298, 0.0215, 0.0275, 0.0312, 0.0252, 0.0247, 0.0263],
+ device='cuda:0'), out_proj_covar=tensor([1.1192e-04, 1.1731e-04, 8.4497e-05, 1.0799e-04, 1.2557e-04, 9.9144e-05,
+ 9.9624e-05, 1.0360e-04], device='cuda:0')
+2023-04-28 03:55:49,585 INFO [finetune.py:976] (0/7) Epoch 29, batch 900, loss[loss=0.1636, simple_loss=0.2298, pruned_loss=0.0487, over 4857.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2359, pruned_loss=0.0444, over 946605.59 frames. ], batch size: 47, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:55:50,925 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161277.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:55:51,421 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.567e+01 1.487e+02 1.850e+02 2.194e+02 4.508e+02, threshold=3.700e+02, percent-clipped=1.0
+2023-04-28 03:56:00,188 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161292.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:56:13,148 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.7114, 2.2743, 2.6098, 3.1546, 2.5271, 2.1360, 2.0710, 2.5912],
+ device='cuda:0'), covar=tensor([0.3134, 0.3116, 0.1521, 0.2350, 0.2779, 0.2467, 0.3665, 0.1881],
+ device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0246, 0.0228, 0.0313, 0.0223, 0.0235, 0.0229, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 03:56:21,349 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161321.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:56:23,751 INFO [finetune.py:976] (0/7) Epoch 29, batch 950, loss[loss=0.1576, simple_loss=0.2312, pruned_loss=0.04198, over 4886.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.234, pruned_loss=0.0441, over 948239.18 frames. ], batch size: 32, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:56:26,433 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0
+2023-04-28 03:56:33,580 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5224, 1.3446, 0.5115, 1.2243, 1.3606, 1.3859, 1.2777, 1.3160],
+ device='cuda:0'), covar=tensor([0.0480, 0.0367, 0.0380, 0.0540, 0.0292, 0.0473, 0.0464, 0.0543],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0047, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053],
+ device='cuda:0')
+2023-04-28 03:57:07,673 INFO [finetune.py:976] (0/7) Epoch 29, batch 1000, loss[loss=0.2092, simple_loss=0.2858, pruned_loss=0.06629, over 4822.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2366, pruned_loss=0.04519, over 949022.81 frames. ], batch size: 39, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:57:09,494 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.091e+02 1.605e+02 1.983e+02 2.337e+02 3.564e+02, threshold=3.965e+02, percent-clipped=0.0
+2023-04-28 03:57:39,572 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161401.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:58:11,413 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. limit=2.0
+2023-04-28 03:58:12,304 INFO [finetune.py:976] (0/7) Epoch 29, batch 1050, loss[loss=0.2053, simple_loss=0.2917, pruned_loss=0.05949, over 4844.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2389, pruned_loss=0.04573, over 948528.80 frames. ], batch size: 49, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:58:25,890 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2560, 1.7972, 2.2323, 2.6530, 2.1655, 1.6513, 1.4367, 1.9376],
+ device='cuda:0'), covar=tensor([0.2946, 0.2889, 0.1527, 0.1910, 0.2479, 0.2634, 0.3976, 0.1776],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0245, 0.0227, 0.0312, 0.0222, 0.0234, 0.0228, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 03:58:55,488 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161460.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:58:56,752 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161462.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 03:58:58,448 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4377, 1.8756, 1.6406, 2.2731, 2.4363, 2.0352, 2.0153, 1.6810],
+ device='cuda:0'), covar=tensor([0.1564, 0.1658, 0.2192, 0.1622, 0.1434, 0.1947, 0.1810, 0.2174],
+ device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0310, 0.0352, 0.0289, 0.0328, 0.0308, 0.0302, 0.0378],
+ device='cuda:0'), out_proj_covar=tensor([6.4545e-05, 6.3442e-05, 7.3609e-05, 5.7868e-05, 6.6960e-05, 6.3939e-05,
+ 6.2203e-05, 7.9814e-05], device='cuda:0')
+2023-04-28 03:59:17,655 INFO [finetune.py:976] (0/7) Epoch 29, batch 1100, loss[loss=0.1926, simple_loss=0.2738, pruned_loss=0.0557, over 4816.00 frames. ], tot_loss[loss=0.167, simple_loss=0.2405, pruned_loss=0.04674, over 951276.41 frames. ], batch size: 39, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 03:59:20,407 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.507e+02 1.843e+02 2.398e+02 4.910e+02, threshold=3.687e+02, percent-clipped=4.0
+2023-04-28 04:00:20,261 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161521.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:00:23,055 INFO [finetune.py:976] (0/7) Epoch 29, batch 1150, loss[loss=0.1695, simple_loss=0.2479, pruned_loss=0.04558, over 4743.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.242, pruned_loss=0.04709, over 953297.70 frames. ], batch size: 27, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 04:00:36,375 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. limit=2.0
+2023-04-28 04:01:04,574 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161557.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:01:25,593 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161572.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:01:27,863 INFO [finetune.py:976] (0/7) Epoch 29, batch 1200, loss[loss=0.149, simple_loss=0.2187, pruned_loss=0.03967, over 4706.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2397, pruned_loss=0.04661, over 951960.35 frames. ], batch size: 23, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 04:01:29,682 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.012e+02 1.552e+02 1.827e+02 2.258e+02 5.032e+02, threshold=3.654e+02, percent-clipped=3.0
+2023-04-28 04:01:29,834 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0467, 1.5659, 1.8999, 1.7502, 1.9092, 1.5466, 0.9059, 1.5037],
+ device='cuda:0'), covar=tensor([0.2987, 0.2949, 0.1612, 0.2045, 0.2097, 0.2373, 0.3894, 0.1848],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0246, 0.0227, 0.0313, 0.0223, 0.0234, 0.0228, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 04:01:47,188 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161587.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:02:09,297 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=161605.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:02:23,984 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161621.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:02:33,462 INFO [finetune.py:976] (0/7) Epoch 29, batch 1250, loss[loss=0.159, simple_loss=0.2394, pruned_loss=0.03928, over 4825.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2375, pruned_loss=0.04578, over 952489.16 frames. ], batch size: 39, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 04:03:28,497 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=161669.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:03:38,174 INFO [finetune.py:976] (0/7) Epoch 29, batch 1300, loss[loss=0.1338, simple_loss=0.2065, pruned_loss=0.0305, over 4771.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2351, pruned_loss=0.04481, over 954581.27 frames. ], batch size: 26, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 04:03:46,269 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.807e+01 1.476e+02 1.722e+02 2.175e+02 4.011e+02, threshold=3.444e+02, percent-clipped=1.0
+2023-04-28 04:04:19,902 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6611, 3.4431, 2.9231, 3.1628, 2.3994, 2.8428, 2.9438, 2.2014],
+ device='cuda:0'), covar=tensor([0.2030, 0.0980, 0.0663, 0.1186, 0.2920, 0.1212, 0.1856, 0.2895],
+ device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0298, 0.0215, 0.0274, 0.0311, 0.0252, 0.0246, 0.0262],
+ device='cuda:0'), out_proj_covar=tensor([1.1169e-04, 1.1720e-04, 8.4314e-05, 1.0783e-04, 1.2514e-04, 9.8924e-05,
+ 9.9021e-05, 1.0342e-04], device='cuda:0')
+2023-04-28 04:04:42,651 INFO [finetune.py:976] (0/7) Epoch 29, batch 1350, loss[loss=0.173, simple_loss=0.2528, pruned_loss=0.04662, over 4911.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2361, pruned_loss=0.04524, over 954988.16 frames. ], batch size: 36, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 04:05:27,898 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161757.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:05:50,109 INFO [finetune.py:976] (0/7) Epoch 29, batch 1400, loss[loss=0.1517, simple_loss=0.2341, pruned_loss=0.0346, over 4763.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.24, pruned_loss=0.04664, over 956835.16 frames. ], batch size: 27, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 04:05:58,002 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.100e+02 1.598e+02 1.895e+02 2.318e+02 6.343e+02, threshold=3.789e+02, percent-clipped=7.0
+2023-04-28 04:06:51,769 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=161815.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:06:52,358 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=161816.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:07:03,300 INFO [finetune.py:976] (0/7) Epoch 29, batch 1450, loss[loss=0.174, simple_loss=0.2484, pruned_loss=0.04981, over 4857.00 frames. ], tot_loss[loss=0.1681, simple_loss=0.2418, pruned_loss=0.04717, over 957404.15 frames. ], batch size: 31, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 04:07:35,914 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.10 vs. limit=2.0
+2023-04-28 04:07:48,590 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6402, 2.3284, 2.5695, 3.0657, 2.5637, 2.1867, 2.0809, 2.3939],
+ device='cuda:0'), covar=tensor([0.3152, 0.2687, 0.1593, 0.1866, 0.2802, 0.2521, 0.3477, 0.1843],
+ device='cuda:0'), in_proj_covar=tensor([0.0289, 0.0244, 0.0226, 0.0311, 0.0221, 0.0233, 0.0226, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 04:08:00,368 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161872.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:08:01,599 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9712, 1.7373, 1.8985, 2.3261, 2.3579, 1.8771, 1.6717, 2.0472],
+ device='cuda:0'), covar=tensor([0.0741, 0.1091, 0.0729, 0.0495, 0.0611, 0.0803, 0.0716, 0.0520],
+ device='cuda:0'), in_proj_covar=tensor([0.0184, 0.0202, 0.0183, 0.0170, 0.0177, 0.0177, 0.0151, 0.0176],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 04:08:02,915 INFO [finetune.py:976] (0/7) Epoch 29, batch 1500, loss[loss=0.1806, simple_loss=0.2486, pruned_loss=0.0563, over 4750.00 frames. ], tot_loss[loss=0.1685, simple_loss=0.2423, pruned_loss=0.04731, over 956892.39 frames. ], batch size: 59, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 04:08:03,660 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=161876.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:08:04,724 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.141e+02 1.641e+02 1.910e+02 2.350e+02 4.691e+02, threshold=3.820e+02, percent-clipped=1.0
+2023-04-28 04:08:05,433 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3765, 3.2303, 1.0812, 1.7894, 1.7670, 2.3061, 1.9275, 1.0436],
+ device='cuda:0'), covar=tensor([0.1313, 0.0856, 0.1694, 0.1152, 0.1017, 0.0900, 0.1385, 0.1875],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0238, 0.0136, 0.0120, 0.0131, 0.0152, 0.0117, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:08:15,984 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=161887.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:09:01,014 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=161920.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:09:09,619 INFO [finetune.py:976] (0/7) Epoch 29, batch 1550, loss[loss=0.1614, simple_loss=0.2349, pruned_loss=0.04394, over 4736.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2413, pruned_loss=0.04604, over 956469.86 frames. ], batch size: 23, lr: 2.85e-03, grad_scale: 16.0
+2023-04-28 04:09:21,314 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=161935.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:10:14,712 INFO [finetune.py:976] (0/7) Epoch 29, batch 1600, loss[loss=0.1809, simple_loss=0.2545, pruned_loss=0.05366, over 4802.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2404, pruned_loss=0.04574, over 957470.30 frames. ], batch size: 51, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:10:16,469 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 7.098e+01 1.531e+02 1.823e+02 2.197e+02 4.043e+02, threshold=3.646e+02, percent-clipped=1.0
+2023-04-28 04:10:48,420 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-162000.pt
+2023-04-28 04:11:22,117 INFO [finetune.py:976] (0/7) Epoch 29, batch 1650, loss[loss=0.1263, simple_loss=0.1976, pruned_loss=0.02754, over 4826.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.237, pruned_loss=0.04481, over 958611.64 frames. ], batch size: 39, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:12:07,041 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162057.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:12:29,332 INFO [finetune.py:976] (0/7) Epoch 29, batch 1700, loss[loss=0.1931, simple_loss=0.2669, pruned_loss=0.05969, over 4822.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2353, pruned_loss=0.04471, over 957764.79 frames. ], batch size: 40, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:12:36,093 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162077.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:12:36,610 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.915e+01 1.582e+02 1.975e+02 2.280e+02 6.731e+02, threshold=3.951e+02, percent-clipped=4.0
+2023-04-28 04:13:11,490 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=162105.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:13:24,514 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162116.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:13:33,002 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0601, 1.7817, 1.9778, 2.4008, 2.4332, 1.9095, 1.7725, 2.0414],
+ device='cuda:0'), covar=tensor([0.0770, 0.1112, 0.0781, 0.0541, 0.0584, 0.0818, 0.0719, 0.0560],
+ device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0204, 0.0185, 0.0171, 0.0178, 0.0178, 0.0152, 0.0177],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 04:13:34,109 INFO [finetune.py:976] (0/7) Epoch 29, batch 1750, loss[loss=0.1712, simple_loss=0.2546, pruned_loss=0.04393, over 4807.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2379, pruned_loss=0.04589, over 956267.95 frames. ], batch size: 51, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:13:51,944 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162138.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:14:27,294 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=162164.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:14:37,335 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162171.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:14:45,214 INFO [finetune.py:976] (0/7) Epoch 29, batch 1800, loss[loss=0.1939, simple_loss=0.2661, pruned_loss=0.06086, over 4857.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2403, pruned_loss=0.04615, over 957183.54 frames. ], batch size: 49, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:14:47,023 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.098e+02 1.596e+02 1.871e+02 2.266e+02 6.327e+02, threshold=3.743e+02, percent-clipped=2.0
+2023-04-28 04:15:49,784 INFO [finetune.py:976] (0/7) Epoch 29, batch 1850, loss[loss=0.1567, simple_loss=0.2386, pruned_loss=0.03745, over 4745.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2419, pruned_loss=0.04686, over 957211.53 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:15:53,541 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162231.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:16:53,967 INFO [finetune.py:976] (0/7) Epoch 29, batch 1900, loss[loss=0.1352, simple_loss=0.2195, pruned_loss=0.02546, over 4791.00 frames. ], tot_loss[loss=0.1688, simple_loss=0.243, pruned_loss=0.04729, over 956429.72 frames. ], batch size: 25, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:16:55,788 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.192e+01 1.518e+02 1.776e+02 2.128e+02 3.542e+02, threshold=3.553e+02, percent-clipped=0.0
+2023-04-28 04:16:56,551 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1145, 1.9530, 2.6470, 2.6920, 1.9238, 1.7044, 2.0302, 1.1699],
+ device='cuda:0'), covar=tensor([0.0522, 0.0796, 0.0292, 0.0725, 0.0712, 0.1058, 0.0732, 0.0705],
+ device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0065, 0.0069, 0.0075, 0.0094, 0.0073, 0.0062],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:17:14,621 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162292.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:17:58,314 INFO [finetune.py:976] (0/7) Epoch 29, batch 1950, loss[loss=0.1464, simple_loss=0.2181, pruned_loss=0.03732, over 4711.00 frames. ], tot_loss[loss=0.1656, simple_loss=0.2399, pruned_loss=0.04564, over 956081.24 frames. ], batch size: 23, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:19:03,738 INFO [finetune.py:976] (0/7) Epoch 29, batch 2000, loss[loss=0.1654, simple_loss=0.2273, pruned_loss=0.05179, over 4893.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2374, pruned_loss=0.04535, over 954645.61 frames. ], batch size: 35, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:19:05,552 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.225e+01 1.505e+02 1.783e+02 2.109e+02 5.594e+02, threshold=3.566e+02, percent-clipped=2.0
+2023-04-28 04:19:43,204 INFO [finetune.py:976] (0/7) Epoch 29, batch 2050, loss[loss=0.1338, simple_loss=0.1999, pruned_loss=0.03387, over 4814.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2343, pruned_loss=0.04424, over 954688.46 frames. ], batch size: 25, lr: 2.84e-03, grad_scale: 16.0
+2023-04-28 04:19:48,150 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162433.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:19:58,565 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9891, 1.4539, 1.8183, 1.7428, 1.7765, 1.4221, 0.7895, 1.5010],
+ device='cuda:0'), covar=tensor([0.3061, 0.3110, 0.1645, 0.2061, 0.2315, 0.2601, 0.4200, 0.1929],
+ device='cuda:0'), in_proj_covar=tensor([0.0290, 0.0246, 0.0227, 0.0312, 0.0222, 0.0235, 0.0227, 0.0185],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 04:20:13,305 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162471.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:20:16,687 INFO [finetune.py:976] (0/7) Epoch 29, batch 2100, loss[loss=0.1671, simple_loss=0.2462, pruned_loss=0.04403, over 4787.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2354, pruned_loss=0.04504, over 953540.86 frames. ], batch size: 25, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:20:19,020 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.161e+01 1.530e+02 1.785e+02 2.249e+02 3.474e+02, threshold=3.570e+02, percent-clipped=1.0
+2023-04-28 04:20:39,344 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162511.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:20:45,708 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=162519.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:20:50,368 INFO [finetune.py:976] (0/7) Epoch 29, batch 2150, loss[loss=0.1494, simple_loss=0.2178, pruned_loss=0.04051, over 4761.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2372, pruned_loss=0.04511, over 954656.32 frames. ], batch size: 28, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:21:21,599 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162572.0, num_to_drop=1, layers_to_drop={1}
+2023-04-28 04:21:22,850 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.44 vs. limit=5.0
+2023-04-28 04:21:23,762 INFO [finetune.py:976] (0/7) Epoch 29, batch 2200, loss[loss=0.1471, simple_loss=0.2196, pruned_loss=0.03729, over 4797.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2405, pruned_loss=0.04618, over 955493.47 frames. ], batch size: 25, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:21:26,030 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.398e+01 1.632e+02 1.891e+02 2.223e+02 3.490e+02, threshold=3.782e+02, percent-clipped=0.0
+2023-04-28 04:21:37,935 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162587.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:22:23,020 INFO [finetune.py:976] (0/7) Epoch 29, batch 2250, loss[loss=0.1769, simple_loss=0.2524, pruned_loss=0.05073, over 4826.00 frames. ], tot_loss[loss=0.1679, simple_loss=0.2426, pruned_loss=0.04657, over 955931.72 frames. ], batch size: 47, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:23:28,886 INFO [finetune.py:976] (0/7) Epoch 29, batch 2300, loss[loss=0.1641, simple_loss=0.235, pruned_loss=0.04659, over 4700.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2423, pruned_loss=0.04607, over 954616.82 frames. ], batch size: 59, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:23:36,409 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.802e+01 1.609e+02 1.813e+02 2.047e+02 3.617e+02, threshold=3.626e+02, percent-clipped=0.0
+2023-04-28 04:23:47,271 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4339, 3.0533, 0.8750, 1.7208, 1.8228, 2.2002, 1.8385, 0.9675],
+ device='cuda:0'), covar=tensor([0.1314, 0.0853, 0.1856, 0.1221, 0.1011, 0.0975, 0.1416, 0.1872],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0237, 0.0136, 0.0120, 0.0131, 0.0153, 0.0117, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:24:17,884 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.46 vs. limit=5.0
+2023-04-28 04:24:21,435 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.9018, 2.7786, 2.2276, 3.3191, 2.9282, 2.9087, 1.2251, 2.8306],
+ device='cuda:0'), covar=tensor([0.2154, 0.1749, 0.3277, 0.2900, 0.3011, 0.2189, 0.5652, 0.2928],
+ device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0221, 0.0254, 0.0307, 0.0304, 0.0254, 0.0278, 0.0277],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 04:24:31,809 INFO [finetune.py:976] (0/7) Epoch 29, batch 2350, loss[loss=0.1321, simple_loss=0.2074, pruned_loss=0.02838, over 4770.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2396, pruned_loss=0.04531, over 955429.87 frames. ], batch size: 28, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:24:42,013 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162733.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:25:30,907 INFO [finetune.py:976] (0/7) Epoch 29, batch 2400, loss[loss=0.1518, simple_loss=0.2213, pruned_loss=0.04113, over 4905.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2366, pruned_loss=0.04448, over 957145.19 frames. ], batch size: 36, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:25:33,147 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.191e+01 1.527e+02 1.810e+02 2.223e+02 4.938e+02, threshold=3.619e+02, percent-clipped=3.0
+2023-04-28 04:25:36,109 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=162781.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:26:04,586 INFO [finetune.py:976] (0/7) Epoch 29, batch 2450, loss[loss=0.1438, simple_loss=0.2288, pruned_loss=0.02942, over 4817.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2344, pruned_loss=0.04394, over 957015.60 frames. ], batch size: 45, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:26:05,927 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4494, 3.3352, 1.2073, 1.8471, 1.8487, 2.3735, 1.9548, 0.9907],
+ device='cuda:0'), covar=tensor([0.1444, 0.0828, 0.1611, 0.1182, 0.1083, 0.0968, 0.1515, 0.1944],
+ device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0237, 0.0136, 0.0120, 0.0131, 0.0153, 0.0116, 0.0118],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:26:05,952 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=162827.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:26:33,768 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=162867.0, num_to_drop=1, layers_to_drop={1}
+2023-04-28 04:26:38,582 INFO [finetune.py:976] (0/7) Epoch 29, batch 2500, loss[loss=0.1576, simple_loss=0.2432, pruned_loss=0.03606, over 4930.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2364, pruned_loss=0.045, over 958018.96 frames. ], batch size: 38, lr: 2.84e-03, grad_scale: 32.0
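
In the [finetune.py:976] entries, loss[...] is the current batch and tot_loss[...] is a frame-weighted running average whose effective memory shows up in the "over N frames" field: batches here carry roughly 4,800 frames, and N climbs from about 216,000 at batch 50 of an epoch to a plateau near 950,000, consistent with a geometric decay of about 1 - 4800/950000, i.e. roughly 0.995 per batch. A minimal tracker with that behavior is sketched below; the decay constant is fitted to the log, not taken from icefall's MetricsTracker.

    class RunningLoss:
        """Frame-weighted running loss with geometric forgetting."""

        def __init__(self, decay: float = 0.995):
            self.decay = decay
            self.loss_sum = 0.0   # decayed sum of loss * frames
            self.frames = 0.0     # decayed sum of frames ("over N frames")

        def update(self, loss: float, num_frames: float) -> None:
            self.loss_sum = self.decay * self.loss_sum + loss * num_frames
            self.frames = self.decay * self.frames + num_frames

        @property
        def value(self) -> float:
            return self.loss_sum / max(self.frames, 1.0)

With 4,800-frame batches this tracker reports roughly 213,000 frames after 50 updates and saturates near 960,000, bracketing the 216,712.58 frames at Epoch 29, batch 50 and the roughly 950,000-frame plateau seen throughout these entries.
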
+2023-04-28 04:26:40,367 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.423e+01 1.461e+02 1.777e+02 2.062e+02 3.489e+02, threshold=3.555e+02, percent-clipped=0.0
+2023-04-28 04:26:47,952 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=162887.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:26:48,598 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=162888.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:26:55,106 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.47 vs. limit=5.0
+2023-04-28 04:27:03,462 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0
+2023-04-28 04:27:12,362 INFO [finetune.py:976] (0/7) Epoch 29, batch 2550, loss[loss=0.182, simple_loss=0.2477, pruned_loss=0.05815, over 4842.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2392, pruned_loss=0.04558, over 954441.52 frames. ], batch size: 49, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:27:18,478 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7332, 1.6380, 2.1486, 2.1816, 1.5225, 1.4314, 1.7378, 0.9489],
+ device='cuda:0'), covar=tensor([0.0691, 0.0618, 0.0434, 0.0683, 0.0746, 0.1104, 0.0616, 0.0753],
+ device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0067, 0.0065, 0.0070, 0.0076, 0.0095, 0.0073, 0.0062],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:27:19,032 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=162935.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:27:19,267 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.89 vs. limit=2.0
+2023-04-28 04:27:21,884 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2463, 2.1180, 1.9035, 1.8520, 2.3700, 1.7921, 2.8189, 1.6302],
+ device='cuda:0'), covar=tensor([0.3422, 0.2066, 0.4667, 0.2740, 0.1540, 0.2529, 0.1218, 0.4489],
+ device='cuda:0'), in_proj_covar=tensor([0.0338, 0.0353, 0.0423, 0.0354, 0.0384, 0.0375, 0.0371, 0.0425],
+ device='cuda:0'), out_proj_covar=tensor([9.9734e-05, 1.0505e-04, 1.2780e-04, 1.0572e-04, 1.1363e-04, 1.1132e-04,
+ 1.0802e-04, 1.2745e-04], device='cuda:0')
+2023-04-28 04:27:46,026 INFO [finetune.py:976] (0/7) Epoch 29, batch 2600, loss[loss=0.1533, simple_loss=0.2354, pruned_loss=0.03558, over 4733.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2412, pruned_loss=0.04591, over 954360.10 frames. ], batch size: 54, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:27:47,805 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.782e+01 1.602e+02 1.933e+02 2.328e+02 3.675e+02, threshold=3.867e+02, percent-clipped=1.0
+2023-04-28 04:28:11,105 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7729, 1.8439, 1.2193, 1.5138, 2.0845, 1.6495, 1.5646, 1.5898],
+ device='cuda:0'), covar=tensor([0.0388, 0.0285, 0.0302, 0.0431, 0.0263, 0.0395, 0.0369, 0.0471],
+ device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028],
+ device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053],
+ device='cuda:0')
+2023-04-28 04:28:19,510 INFO [finetune.py:976] (0/7) Epoch 29, batch 2650, loss[loss=0.1508, simple_loss=0.238, pruned_loss=0.03178, over 4761.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2407, pruned_loss=0.04569, over 952583.50 frames. ], batch size: 51, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:28:30,393 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3587, 1.8581, 2.2706, 2.7175, 2.2737, 1.7657, 1.5536, 2.0644],
+ device='cuda:0'), covar=tensor([0.3113, 0.3006, 0.1539, 0.2038, 0.2386, 0.2594, 0.3795, 0.1892],
+ device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0245, 0.0226, 0.0312, 0.0221, 0.0234, 0.0226, 0.0184],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001],
+ device='cuda:0')
+2023-04-28 04:28:44,479 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6855, 1.5368, 2.0005, 1.9890, 1.4854, 1.4108, 1.6097, 1.1332],
+ device='cuda:0'), covar=tensor([0.0554, 0.0617, 0.0381, 0.0679, 0.0739, 0.1112, 0.0583, 0.0581],
+ device='cuda:0'), in_proj_covar=tensor([0.0072, 0.0068, 0.0066, 0.0070, 0.0076, 0.0096, 0.0074, 0.0063],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:28:57,567 INFO [finetune.py:976] (0/7) Epoch 29, batch 2700, loss[loss=0.1759, simple_loss=0.2371, pruned_loss=0.05736, over 4760.00 frames. ], tot_loss[loss=0.1652, simple_loss=0.2396, pruned_loss=0.04538, over 955245.77 frames. ], batch size: 23, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:29:04,329 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.042e+02 1.516e+02 1.799e+02 2.193e+02 4.304e+02, threshold=3.599e+02, percent-clipped=1.0
+2023-04-28 04:29:17,272 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163090.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:29:46,790 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0489, 1.7604, 2.2175, 2.3606, 2.1002, 1.9924, 2.1152, 2.0545],
+ device='cuda:0'), covar=tensor([0.4374, 0.6843, 0.6764, 0.5405, 0.5612, 0.7828, 0.8497, 0.9645],
+ device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0425, 0.0520, 0.0508, 0.0475, 0.0511, 0.0515, 0.0529],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 04:29:57,693 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. limit=2.0
+2023-04-28 04:29:58,665 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7090, 1.9769, 1.9954, 2.1878, 2.1163, 2.3108, 1.8284, 3.9613],
+ device='cuda:0'), covar=tensor([0.0517, 0.0695, 0.0682, 0.1055, 0.0549, 0.0440, 0.0615, 0.0117],
+ device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0042, 0.0040, 0.0037, 0.0037, 0.0054],
+ device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016],
+ device='cuda:0')
+2023-04-28 04:30:02,212 INFO [finetune.py:976] (0/7) Epoch 29, batch 2750, loss[loss=0.145, simple_loss=0.2219, pruned_loss=0.03404, over 4904.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2366, pruned_loss=0.04489, over 955326.15 frames. ], batch size: 32, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:30:20,478 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0
+2023-04-28 04:30:38,985 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163151.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:30:50,677 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163167.0, num_to_drop=1, layers_to_drop={0}
+2023-04-28 04:31:01,090 INFO [finetune.py:976] (0/7) Epoch 29, batch 2800, loss[loss=0.1565, simple_loss=0.2292, pruned_loss=0.04193, over 4894.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2334, pruned_loss=0.04401, over 950879.17 frames. ], batch size: 32, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:31:08,097 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.000e+02 1.442e+02 1.723e+02 2.053e+02 5.173e+02, threshold=3.446e+02, percent-clipped=1.0
+2023-04-28 04:31:11,255 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163183.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:31:18,634 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7214, 1.3435, 1.7753, 2.3099, 1.8721, 1.6742, 1.7443, 1.6899],
+ device='cuda:0'), covar=tensor([0.4114, 0.6370, 0.5275, 0.4587, 0.5346, 0.6905, 0.7290, 0.8818],
+ device='cuda:0'), in_proj_covar=tensor([0.0449, 0.0425, 0.0520, 0.0508, 0.0475, 0.0511, 0.0516, 0.0530],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 04:31:52,447 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=163215.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:31:59,019 INFO [finetune.py:976] (0/7) Epoch 29, batch 2850, loss[loss=0.2031, simple_loss=0.2758, pruned_loss=0.06515, over 4815.00 frames. ], tot_loss[loss=0.16, simple_loss=0.2328, pruned_loss=0.04359, over 948992.67 frames. ], batch size: 40, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:32:08,304 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=163240.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:32:17,191 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.9747, 3.7000, 3.1247, 4.5089, 3.6547, 3.8546, 2.0052, 3.9816],
+ device='cuda:0'), covar=tensor([0.1613, 0.1120, 0.4405, 0.1055, 0.2588, 0.1690, 0.4876, 0.2024],
+ device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0222, 0.0255, 0.0308, 0.0305, 0.0255, 0.0279, 0.0278],
+ device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002],
+ device='cuda:0')
+2023-04-28 04:32:30,357 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.70 vs. limit=5.0
+2023-04-28 04:32:31,404 INFO [finetune.py:976] (0/7) Epoch 29, batch 2900, loss[loss=0.1602, simple_loss=0.2372, pruned_loss=0.04159, over 4867.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2343, pruned_loss=0.04435, over 949330.79 frames. ], batch size: 34, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:32:33,719 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.435e+02 1.823e+02 2.184e+02 4.986e+02, threshold=3.647e+02, percent-clipped=1.0
+2023-04-28 04:32:47,878 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=163301.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:33:04,182 INFO [finetune.py:976] (0/7) Epoch 29, batch 2950, loss[loss=0.1691, simple_loss=0.2453, pruned_loss=0.04642, over 4697.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2377, pruned_loss=0.04532, over 947276.55 frames. ], batch size: 59, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:33:12,086 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8825, 3.9922, 0.8826, 2.1816, 2.3786, 2.7025, 2.4195, 0.8811],
+ device='cuda:0'), covar=tensor([0.1393, 0.1009, 0.2151, 0.1284, 0.1027, 0.1149, 0.1373, 0.2292],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0238, 0.0136, 0.0120, 0.0131, 0.0152, 0.0117, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:33:37,006 INFO [finetune.py:976] (0/7) Epoch 29, batch 3000, loss[loss=0.1818, simple_loss=0.2621, pruned_loss=0.05074, over 4834.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2407, pruned_loss=0.04615, over 949009.90 frames. ], batch size: 49, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:33:37,007 INFO [finetune.py:1001] (0/7) Computing validation loss
+2023-04-28 04:33:41,678 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8538, 2.2671, 1.8058, 1.5625, 1.3997, 1.4217, 1.8381, 1.3800],
+ device='cuda:0'), covar=tensor([0.1628, 0.1215, 0.1384, 0.1631, 0.2298, 0.1872, 0.0965, 0.2048],
+ device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0209, 0.0170, 0.0203, 0.0200, 0.0186, 0.0156, 0.0186],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003],
+ device='cuda:0')
+2023-04-28 04:33:47,842 INFO [finetune.py:1010] (0/7) Epoch 29, validation: loss=0.1535, simple_loss=0.222, pruned_loss=0.04252, over 2265189.00 frames.
+2023-04-28 04:33:47,842 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB
+2023-04-28 04:33:49,655 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.160e+02 1.593e+02 1.913e+02 2.268e+02 4.606e+02, threshold=3.825e+02, percent-clipped=1.0
+2023-04-28 04:33:58,142 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0321, 1.0239, 1.1801, 1.1622, 0.9759, 0.9525, 0.9369, 0.4980],
+ device='cuda:0'), covar=tensor([0.0567, 0.0514, 0.0384, 0.0463, 0.0661, 0.1110, 0.0428, 0.0571],
+ device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0067, 0.0065, 0.0069, 0.0076, 0.0095, 0.0073, 0.0062],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:34:19,352 INFO [finetune.py:976] (0/7) Epoch 29, batch 3050, loss[loss=0.1737, simple_loss=0.2548, pruned_loss=0.04631, over 4897.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.241, pruned_loss=0.04583, over 951423.67 frames. ], batch size: 36, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:34:23,680 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.65 vs. limit=5.0
+2023-04-28 04:34:34,881 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163446.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:34:50,197 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.77 vs. limit=5.0
+2023-04-28 04:34:53,046 INFO [finetune.py:976] (0/7) Epoch 29, batch 3100, loss[loss=0.158, simple_loss=0.2325, pruned_loss=0.04171, over 4895.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2394, pruned_loss=0.04532, over 954183.21 frames. ], batch size: 35, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:34:55,820 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.898e+01 1.464e+02 1.707e+02 2.164e+02 5.622e+02, threshold=3.413e+02, percent-clipped=1.0
+2023-04-28 04:34:59,435 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163483.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:35:10,676 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7693, 1.2029, 1.7891, 2.2007, 1.8140, 1.6897, 1.7525, 1.7428],
+ device='cuda:0'), covar=tensor([0.4369, 0.6795, 0.5868, 0.5470, 0.5871, 0.7932, 0.7735, 0.8773],
+ device='cuda:0'), in_proj_covar=tensor([0.0449, 0.0426, 0.0523, 0.0510, 0.0476, 0.0513, 0.0515, 0.0531],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 04:35:11,361 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.56 vs. limit=2.0
+2023-04-28 04:35:27,054 INFO [finetune.py:976] (0/7) Epoch 29, batch 3150, loss[loss=0.1284, simple_loss=0.2086, pruned_loss=0.02415, over 4785.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2364, pruned_loss=0.04438, over 954780.14 frames. ], batch size: 29, lr: 2.84e-03, grad_scale: 32.0
+2023-04-28 04:35:27,167 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.6156, 1.2234, 1.3141, 1.0989, 1.2769, 1.1041, 1.4503, 1.2113],
+ device='cuda:0'), covar=tensor([0.2207, 0.1407, 0.2998, 0.1678, 0.1070, 0.1612, 0.1263, 0.3445],
+ device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0355, 0.0424, 0.0354, 0.0385, 0.0377, 0.0372, 0.0426],
+ device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001],
+ device='cuda:0')
+2023-04-28 04:35:28,935 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3941, 3.0782, 0.8628, 1.6982, 1.8034, 2.0697, 1.8334, 0.9557],
+ device='cuda:0'), covar=tensor([0.1314, 0.1032, 0.1794, 0.1200, 0.0966, 0.1029, 0.1405, 0.1782],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0238, 0.0136, 0.0120, 0.0131, 0.0152, 0.0117, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:35:31,733 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=163531.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:35:59,134 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.19 vs. limit=2.0
+2023-04-28 04:36:10,391 INFO [finetune.py:976] (0/7) Epoch 29, batch 3200, loss[loss=0.1868, simple_loss=0.2527, pruned_loss=0.06041, over 4901.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2335, pruned_loss=0.04381, over 955532.93 frames. ], batch size: 35, lr: 2.83e-03, grad_scale: 32.0
+2023-04-28 04:36:12,739 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.620e+01 1.506e+02 1.753e+02 2.068e+02 8.624e+02, threshold=3.506e+02, percent-clipped=5.0
+2023-04-28 04:36:42,511 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=163596.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:36:58,173 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5489, 3.9565, 0.8320, 2.0855, 2.2123, 2.6179, 2.2810, 0.9680],
+ device='cuda:0'), covar=tensor([0.1409, 0.0839, 0.2047, 0.1217, 0.1017, 0.1133, 0.1472, 0.2214],
+ device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0238, 0.0136, 0.0121, 0.0132, 0.0153, 0.0117, 0.0119],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:37:05,586 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.28 vs. limit=2.0
+2023-04-28 04:37:17,128 INFO [finetune.py:976] (0/7) Epoch 29, batch 3250, loss[loss=0.2428, simple_loss=0.3063, pruned_loss=0.08963, over 4899.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.2355, pruned_loss=0.04476, over 954126.93 frames. ], batch size: 43, lr: 2.83e-03, grad_scale: 32.0
+2023-04-28 04:38:20,406 INFO [finetune.py:976] (0/7) Epoch 29, batch 3300, loss[loss=0.1925, simple_loss=0.2759, pruned_loss=0.05453, over 4850.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2396, pruned_loss=0.0462, over 956624.14 frames. ], batch size: 44, lr: 2.83e-03, grad_scale: 32.0
+2023-04-28 04:38:22,204 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.605e+02 1.841e+02 2.298e+02 3.971e+02, threshold=3.681e+02, percent-clipped=3.0
+2023-04-28 04:39:05,842 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0205, 1.0409, 1.2192, 1.1546, 1.0112, 0.9216, 0.9584, 0.4662],
+ device='cuda:0'), covar=tensor([0.0556, 0.0489, 0.0431, 0.0516, 0.0686, 0.1101, 0.0418, 0.0644],
+ device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0067, 0.0065, 0.0069, 0.0076, 0.0095, 0.0073, 0.0062],
+ device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003],
+ device='cuda:0')
+2023-04-28 04:39:15,516 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.76 vs. limit=2.0
+2023-04-28 04:39:22,576 INFO [finetune.py:976] (0/7) Epoch 29, batch 3350, loss[loss=0.1847, simple_loss=0.2534, pruned_loss=0.05802, over 4877.00 frames. ], tot_loss[loss=0.1691, simple_loss=0.2424, pruned_loss=0.04786, over 955552.41 frames. ], batch size: 32, lr: 2.83e-03, grad_scale: 32.0
+2023-04-28 04:39:46,987 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163746.0, num_to_drop=0, layers_to_drop=set()
+2023-04-28 04:40:27,596 INFO [finetune.py:976] (0/7) Epoch 29, batch 3400, loss[loss=0.1371, simple_loss=0.227, pruned_loss=0.02361, over 4873.00 frames. ], tot_loss[loss=0.1698, simple_loss=0.2434, pruned_loss=0.04813, over 955993.16 frames.
], batch size: 34, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:40:29,468 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.152e+02 1.589e+02 1.829e+02 2.322e+02 5.159e+02, threshold=3.657e+02, percent-clipped=5.0 +2023-04-28 04:40:50,061 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=163794.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:41:23,184 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.86 vs. limit=2.0 +2023-04-28 04:41:32,571 INFO [finetune.py:976] (0/7) Epoch 29, batch 3450, loss[loss=0.1608, simple_loss=0.2392, pruned_loss=0.04122, over 4932.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2423, pruned_loss=0.04744, over 956084.82 frames. ], batch size: 33, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:41:44,853 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-28 04:42:36,322 INFO [finetune.py:976] (0/7) Epoch 29, batch 3500, loss[loss=0.1589, simple_loss=0.2277, pruned_loss=0.04503, over 4816.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2396, pruned_loss=0.04615, over 957395.05 frames. ], batch size: 41, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:42:38,149 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.042e+02 1.447e+02 1.753e+02 2.110e+02 3.235e+02, threshold=3.507e+02, percent-clipped=0.0 +2023-04-28 04:42:59,200 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=163896.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:43:28,395 INFO [finetune.py:976] (0/7) Epoch 29, batch 3550, loss[loss=0.1482, simple_loss=0.2219, pruned_loss=0.03721, over 4913.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2371, pruned_loss=0.04541, over 957879.24 frames. ], batch size: 37, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:43:35,162 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.4340, 1.3177, 1.4216, 1.0170, 1.3965, 1.1442, 1.6897, 1.4129], + device='cuda:0'), covar=tensor([0.3977, 0.2102, 0.5504, 0.3054, 0.1708, 0.2428, 0.1939, 0.5008], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0354, 0.0423, 0.0352, 0.0385, 0.0376, 0.0371, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 04:43:39,969 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=163944.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:43:53,013 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=5.00 vs. limit=5.0 +2023-04-28 04:44:01,291 INFO [finetune.py:976] (0/7) Epoch 29, batch 3600, loss[loss=0.1549, simple_loss=0.2295, pruned_loss=0.04015, over 4899.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2353, pruned_loss=0.04475, over 956140.90 frames. ], batch size: 35, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:44:03,550 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.301e+01 1.508e+02 1.790e+02 2.030e+02 3.807e+02, threshold=3.580e+02, percent-clipped=2.0 +2023-04-28 04:44:17,228 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-164000.pt +2023-04-28 04:44:36,729 INFO [finetune.py:976] (0/7) Epoch 29, batch 3650, loss[loss=0.1681, simple_loss=0.2292, pruned_loss=0.05355, over 4307.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2371, pruned_loss=0.04503, over 955407.43 frames. 
], batch size: 18, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:44:55,584 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.34 vs. limit=2.0 +2023-04-28 04:45:10,039 INFO [finetune.py:976] (0/7) Epoch 29, batch 3700, loss[loss=0.1249, simple_loss=0.2058, pruned_loss=0.02202, over 4779.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2398, pruned_loss=0.0458, over 954052.15 frames. ], batch size: 26, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:45:11,862 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.127e+02 1.572e+02 1.919e+02 2.476e+02 4.831e+02, threshold=3.838e+02, percent-clipped=5.0 +2023-04-28 04:45:19,676 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164090.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:45:43,377 INFO [finetune.py:976] (0/7) Epoch 29, batch 3750, loss[loss=0.1812, simple_loss=0.2622, pruned_loss=0.05013, over 4896.00 frames. ], tot_loss[loss=0.1673, simple_loss=0.2414, pruned_loss=0.0466, over 953934.32 frames. ], batch size: 43, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:45:50,040 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9504, 2.5754, 1.0570, 1.3498, 1.8648, 1.2289, 3.4457, 1.8405], + device='cuda:0'), covar=tensor([0.0752, 0.0666, 0.0819, 0.1275, 0.0543, 0.1061, 0.0250, 0.0582], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 04:46:11,064 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164151.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:46:13,516 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164155.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:46:41,659 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.97 vs. limit=2.0 +2023-04-28 04:46:43,101 INFO [finetune.py:976] (0/7) Epoch 29, batch 3800, loss[loss=0.2048, simple_loss=0.2864, pruned_loss=0.0616, over 4911.00 frames. ], tot_loss[loss=0.1687, simple_loss=0.2432, pruned_loss=0.04714, over 954321.86 frames. ], batch size: 46, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:46:47,404 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.309e+01 1.497e+02 1.761e+02 2.099e+02 4.096e+02, threshold=3.523e+02, percent-clipped=1.0 +2023-04-28 04:47:17,880 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164216.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:47:34,750 INFO [finetune.py:976] (0/7) Epoch 29, batch 3850, loss[loss=0.1332, simple_loss=0.2167, pruned_loss=0.02487, over 4748.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2419, pruned_loss=0.04624, over 953604.62 frames. 
], batch size: 26, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:47:34,874 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2230, 2.2246, 1.8544, 1.8494, 2.3153, 1.9845, 2.8538, 1.7182], + device='cuda:0'), covar=tensor([0.3504, 0.1896, 0.4338, 0.3384, 0.1782, 0.2267, 0.1217, 0.4352], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0353, 0.0422, 0.0352, 0.0384, 0.0374, 0.0370, 0.0423], + device='cuda:0'), out_proj_covar=tensor([9.9997e-05, 1.0494e-04, 1.2775e-04, 1.0517e-04, 1.1355e-04, 1.1104e-04, + 1.0795e-04, 1.2698e-04], device='cuda:0') +2023-04-28 04:47:56,134 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8533, 2.8712, 2.3653, 3.2850, 2.9061, 2.8801, 1.1324, 2.7593], + device='cuda:0'), covar=tensor([0.2279, 0.1630, 0.3134, 0.2944, 0.3261, 0.2246, 0.6236, 0.2971], + device='cuda:0'), in_proj_covar=tensor([0.0245, 0.0220, 0.0252, 0.0305, 0.0300, 0.0250, 0.0275, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 04:47:59,261 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9276, 2.2861, 1.9533, 2.2054, 1.6324, 1.8879, 1.9329, 1.5330], + device='cuda:0'), covar=tensor([0.1583, 0.0996, 0.0782, 0.0921, 0.2766, 0.1030, 0.1664, 0.2016], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0297, 0.0215, 0.0274, 0.0310, 0.0251, 0.0245, 0.0261], + device='cuda:0'), out_proj_covar=tensor([1.1186e-04, 1.1667e-04, 8.4309e-05, 1.0753e-04, 1.2474e-04, 9.8685e-05, + 9.8452e-05, 1.0260e-04], device='cuda:0') +2023-04-28 04:48:38,395 INFO [finetune.py:976] (0/7) Epoch 29, batch 3900, loss[loss=0.1487, simple_loss=0.2262, pruned_loss=0.03561, over 4901.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2393, pruned_loss=0.04535, over 955580.06 frames. ], batch size: 35, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:48:40,204 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.003e+02 1.498e+02 1.753e+02 2.124e+02 6.066e+02, threshold=3.506e+02, percent-clipped=2.0 +2023-04-28 04:49:39,856 INFO [finetune.py:976] (0/7) Epoch 29, batch 3950, loss[loss=0.1451, simple_loss=0.206, pruned_loss=0.04212, over 4740.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2365, pruned_loss=0.04443, over 956048.91 frames. ], batch size: 54, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:50:24,347 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164360.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:50:24,502 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.43 vs. limit=5.0 +2023-04-28 04:50:25,011 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6798, 1.9732, 1.7044, 1.3880, 1.2861, 1.2792, 1.7182, 1.2636], + device='cuda:0'), covar=tensor([0.1770, 0.1438, 0.1531, 0.1897, 0.2401, 0.2114, 0.1065, 0.2192], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0210, 0.0171, 0.0204, 0.0201, 0.0187, 0.0157, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 04:50:45,028 INFO [finetune.py:976] (0/7) Epoch 29, batch 4000, loss[loss=0.142, simple_loss=0.2294, pruned_loss=0.02736, over 4835.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.2347, pruned_loss=0.04371, over 957692.87 frames. 
], batch size: 33, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:50:47,328 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.986e+01 1.466e+02 1.722e+02 2.043e+02 3.565e+02, threshold=3.444e+02, percent-clipped=1.0 +2023-04-28 04:51:41,771 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164421.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:51:48,792 INFO [finetune.py:976] (0/7) Epoch 29, batch 4050, loss[loss=0.1829, simple_loss=0.2695, pruned_loss=0.04813, over 4829.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2375, pruned_loss=0.04475, over 957316.87 frames. ], batch size: 40, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:52:20,687 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164446.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:52:55,387 INFO [finetune.py:976] (0/7) Epoch 29, batch 4100, loss[loss=0.2203, simple_loss=0.2889, pruned_loss=0.07581, over 4821.00 frames. ], tot_loss[loss=0.1653, simple_loss=0.2399, pruned_loss=0.04533, over 957035.86 frames. ], batch size: 40, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:53:02,655 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.616e+02 1.841e+02 2.144e+02 4.180e+02, threshold=3.683e+02, percent-clipped=3.0 +2023-04-28 04:53:41,164 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164511.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:53:54,830 INFO [finetune.py:976] (0/7) Epoch 29, batch 4150, loss[loss=0.1386, simple_loss=0.2122, pruned_loss=0.03248, over 4644.00 frames. ], tot_loss[loss=0.1668, simple_loss=0.2414, pruned_loss=0.0461, over 954902.85 frames. ], batch size: 23, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:54:35,907 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164568.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:54:40,036 INFO [finetune.py:976] (0/7) Epoch 29, batch 4200, loss[loss=0.1523, simple_loss=0.2271, pruned_loss=0.0388, over 4858.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2421, pruned_loss=0.04611, over 954904.98 frames. ], batch size: 31, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:54:41,956 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164578.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:54:42,442 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.830e+01 1.587e+02 1.797e+02 2.330e+02 9.173e+02, threshold=3.593e+02, percent-clipped=2.0 +2023-04-28 04:55:14,302 INFO [finetune.py:976] (0/7) Epoch 29, batch 4250, loss[loss=0.1725, simple_loss=0.2354, pruned_loss=0.05484, over 4711.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2392, pruned_loss=0.04512, over 954925.37 frames. 
], batch size: 23, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:55:16,893 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164629.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:55:24,069 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164639.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 04:55:37,639 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164658.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:55:47,338 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0813, 1.9644, 2.1794, 2.5054, 2.5418, 2.0906, 1.8112, 2.2218], + device='cuda:0'), covar=tensor([0.0714, 0.1023, 0.0658, 0.0475, 0.0507, 0.0810, 0.0661, 0.0504], + device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0200, 0.0183, 0.0170, 0.0176, 0.0175, 0.0150, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 04:55:48,428 INFO [finetune.py:976] (0/7) Epoch 29, batch 4300, loss[loss=0.1507, simple_loss=0.2254, pruned_loss=0.03803, over 4913.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2363, pruned_loss=0.04454, over 953610.13 frames. ], batch size: 36, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:55:50,845 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.176e+01 1.533e+02 1.704e+02 2.198e+02 4.636e+02, threshold=3.409e+02, percent-clipped=3.0 +2023-04-28 04:56:16,865 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164716.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:56:18,746 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164719.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:56:22,290 INFO [finetune.py:976] (0/7) Epoch 29, batch 4350, loss[loss=0.1825, simple_loss=0.2515, pruned_loss=0.05674, over 4829.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2338, pruned_loss=0.04409, over 952618.21 frames. ], batch size: 39, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:56:36,298 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164746.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:56:36,312 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=164746.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:57:17,722 INFO [finetune.py:976] (0/7) Epoch 29, batch 4400, loss[loss=0.1312, simple_loss=0.2044, pruned_loss=0.02903, over 4734.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2349, pruned_loss=0.04426, over 951936.28 frames. ], batch size: 23, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:57:20,180 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.037e+02 1.542e+02 1.919e+02 2.288e+02 3.219e+02, threshold=3.837e+02, percent-clipped=0.0 +2023-04-28 04:57:41,271 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=164794.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:57:41,440 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.16 vs. 
limit=2.0 +2023-04-28 04:58:01,568 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=164807.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:58:04,448 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=164811.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:58:04,478 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6679, 1.5023, 1.6539, 2.0096, 2.0707, 1.6225, 1.3215, 1.7594], + device='cuda:0'), covar=tensor([0.0817, 0.1299, 0.0842, 0.0560, 0.0587, 0.0855, 0.0802, 0.0632], + device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0201, 0.0183, 0.0170, 0.0177, 0.0176, 0.0150, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 04:58:24,168 INFO [finetune.py:976] (0/7) Epoch 29, batch 4450, loss[loss=0.1445, simple_loss=0.2137, pruned_loss=0.03768, over 4768.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2382, pruned_loss=0.04561, over 953685.37 frames. ], batch size: 26, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:58:59,955 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=164859.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 04:59:10,503 INFO [finetune.py:976] (0/7) Epoch 29, batch 4500, loss[loss=0.1842, simple_loss=0.2599, pruned_loss=0.05421, over 4907.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.24, pruned_loss=0.04623, over 953054.98 frames. ], batch size: 36, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 04:59:18,300 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.040e+02 1.480e+02 1.804e+02 2.196e+02 5.471e+02, threshold=3.609e+02, percent-clipped=1.0 +2023-04-28 04:59:59,935 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3979, 1.3554, 1.6599, 1.6303, 1.2727, 1.2115, 1.3748, 0.8924], + device='cuda:0'), covar=tensor([0.0458, 0.0504, 0.0343, 0.0453, 0.0675, 0.0878, 0.0457, 0.0484], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0067, 0.0065, 0.0069, 0.0076, 0.0095, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:00:15,099 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164924.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:00:20,955 INFO [finetune.py:976] (0/7) Epoch 29, batch 4550, loss[loss=0.1859, simple_loss=0.2587, pruned_loss=0.05658, over 4860.00 frames. ], tot_loss[loss=0.1674, simple_loss=0.2413, pruned_loss=0.04676, over 951056.03 frames. ], batch size: 44, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 05:00:32,582 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=164934.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 05:01:05,776 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.4901, 1.3544, 1.4477, 1.0846, 1.3959, 1.3047, 1.7413, 1.3960], + device='cuda:0'), covar=tensor([0.3672, 0.2107, 0.5245, 0.2783, 0.1576, 0.2101, 0.1641, 0.5133], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0355, 0.0426, 0.0354, 0.0386, 0.0375, 0.0373, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:01:20,642 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.79 vs. 
limit=2.0 +2023-04-28 05:01:26,332 INFO [finetune.py:976] (0/7) Epoch 29, batch 4600, loss[loss=0.1228, simple_loss=0.2009, pruned_loss=0.02239, over 4770.00 frames. ], tot_loss[loss=0.1669, simple_loss=0.2413, pruned_loss=0.04619, over 953980.90 frames. ], batch size: 28, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 05:01:29,216 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.461e+02 1.731e+02 1.968e+02 2.942e+02, threshold=3.463e+02, percent-clipped=1.0 +2023-04-28 05:01:47,582 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.65 vs. limit=5.0 +2023-04-28 05:02:14,411 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165014.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:02:20,921 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165016.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:02:31,924 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.86 vs. limit=5.0 +2023-04-28 05:02:32,274 INFO [finetune.py:976] (0/7) Epoch 29, batch 4650, loss[loss=0.1942, simple_loss=0.2573, pruned_loss=0.06555, over 4150.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2392, pruned_loss=0.04579, over 953726.25 frames. ], batch size: 65, lr: 2.83e-03, grad_scale: 32.0 +2023-04-28 05:03:18,274 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=165064.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:03:37,007 INFO [finetune.py:976] (0/7) Epoch 29, batch 4700, loss[loss=0.1476, simple_loss=0.2177, pruned_loss=0.0387, over 4247.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2355, pruned_loss=0.04454, over 952685.84 frames. ], batch size: 65, lr: 2.83e-03, grad_scale: 16.0 +2023-04-28 05:03:40,047 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.099e+02 1.538e+02 1.838e+02 2.257e+02 4.770e+02, threshold=3.676e+02, percent-clipped=2.0 +2023-04-28 05:04:09,162 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165102.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:04:40,800 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165123.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:04:42,451 INFO [finetune.py:976] (0/7) Epoch 29, batch 4750, loss[loss=0.1525, simple_loss=0.2213, pruned_loss=0.04183, over 4735.00 frames. ], tot_loss[loss=0.1603, simple_loss=0.2331, pruned_loss=0.04377, over 953138.11 frames. 
], batch size: 54, lr: 2.83e-03, grad_scale: 16.0 +2023-04-28 05:04:59,219 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9918, 1.1314, 1.7150, 1.8021, 1.7466, 1.7789, 1.6697, 1.6728], + device='cuda:0'), covar=tensor([0.4080, 0.5000, 0.3831, 0.3916, 0.5176, 0.6738, 0.4649, 0.4297], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0374, 0.0331, 0.0343, 0.0352, 0.0394, 0.0364, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 05:05:24,301 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2341, 1.4132, 1.7267, 1.8277, 1.6617, 1.7091, 1.7324, 1.7442], + device='cuda:0'), covar=tensor([0.3622, 0.4969, 0.3889, 0.4126, 0.4991, 0.6441, 0.4611, 0.4100], + device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0377, 0.0333, 0.0345, 0.0354, 0.0396, 0.0365, 0.0336], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 05:05:25,309 INFO [finetune.py:976] (0/7) Epoch 29, batch 4800, loss[loss=0.1997, simple_loss=0.2872, pruned_loss=0.05605, over 4893.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2372, pruned_loss=0.04528, over 955356.95 frames. ], batch size: 43, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:05:28,761 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.123e+02 1.552e+02 1.836e+02 2.145e+02 4.672e+02, threshold=3.672e+02, percent-clipped=1.0 +2023-04-28 05:05:31,322 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165184.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:05:39,133 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7651, 1.3745, 1.8039, 2.2324, 1.8620, 1.7197, 1.7946, 1.7246], + device='cuda:0'), covar=tensor([0.3931, 0.6341, 0.5841, 0.4986, 0.5157, 0.7225, 0.7323, 0.9075], + device='cuda:0'), in_proj_covar=tensor([0.0445, 0.0424, 0.0520, 0.0509, 0.0474, 0.0513, 0.0515, 0.0531], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:05:57,712 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165224.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:05:58,222 INFO [finetune.py:976] (0/7) Epoch 29, batch 4850, loss[loss=0.1999, simple_loss=0.2696, pruned_loss=0.06506, over 4911.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2405, pruned_loss=0.04621, over 953185.55 frames. ], batch size: 36, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:06:05,319 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165234.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 05:06:13,884 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.25 vs. limit=5.0 +2023-04-28 05:06:29,511 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=165272.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:06:31,765 INFO [finetune.py:976] (0/7) Epoch 29, batch 4900, loss[loss=0.1384, simple_loss=0.2248, pruned_loss=0.02597, over 4770.00 frames. ], tot_loss[loss=0.1665, simple_loss=0.2413, pruned_loss=0.04584, over 954896.17 frames. 
], batch size: 29, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:06:35,317 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.134e+02 1.561e+02 1.845e+02 2.104e+02 4.657e+02, threshold=3.691e+02, percent-clipped=1.0 +2023-04-28 05:06:37,058 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=165282.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:06:53,363 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.69 vs. limit=2.0 +2023-04-28 05:06:57,493 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165314.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:07:04,963 INFO [finetune.py:976] (0/7) Epoch 29, batch 4950, loss[loss=0.136, simple_loss=0.1934, pruned_loss=0.03934, over 4164.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2435, pruned_loss=0.04683, over 954231.78 frames. ], batch size: 18, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:07:29,753 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=165362.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:07:32,219 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9097, 1.9242, 1.9812, 1.6047, 2.0500, 1.7186, 2.6487, 1.7215], + device='cuda:0'), covar=tensor([0.3976, 0.1941, 0.4234, 0.2947, 0.1704, 0.2491, 0.1347, 0.4335], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0356, 0.0426, 0.0353, 0.0386, 0.0377, 0.0372, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:07:38,073 INFO [finetune.py:976] (0/7) Epoch 29, batch 5000, loss[loss=0.1353, simple_loss=0.2103, pruned_loss=0.03019, over 4911.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.2415, pruned_loss=0.04643, over 951865.09 frames. ], batch size: 43, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:07:41,608 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.839e+01 1.556e+02 1.839e+02 2.298e+02 4.035e+02, threshold=3.677e+02, percent-clipped=3.0 +2023-04-28 05:07:57,188 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165402.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:08:01,434 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-28 05:08:12,132 INFO [finetune.py:976] (0/7) Epoch 29, batch 5050, loss[loss=0.1427, simple_loss=0.218, pruned_loss=0.03372, over 4891.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2382, pruned_loss=0.04521, over 951449.55 frames. ], batch size: 32, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:08:29,758 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=165450.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:08:45,559 INFO [finetune.py:976] (0/7) Epoch 29, batch 5100, loss[loss=0.1285, simple_loss=0.1954, pruned_loss=0.03081, over 4712.00 frames. ], tot_loss[loss=0.1609, simple_loss=0.2341, pruned_loss=0.04389, over 951387.39 frames. 
], batch size: 23, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:08:48,554 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165479.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:08:49,074 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.177e+01 1.550e+02 1.878e+02 2.337e+02 3.681e+02, threshold=3.756e+02, percent-clipped=1.0 +2023-04-28 05:09:18,724 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1602, 2.0784, 1.9599, 1.8114, 2.3195, 1.9079, 2.8014, 1.7599], + device='cuda:0'), covar=tensor([0.3656, 0.2308, 0.4608, 0.3089, 0.1728, 0.2593, 0.1344, 0.4782], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0353, 0.0423, 0.0351, 0.0383, 0.0374, 0.0369, 0.0422], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:09:47,677 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-28 05:09:48,070 INFO [finetune.py:976] (0/7) Epoch 29, batch 5150, loss[loss=0.1772, simple_loss=0.2422, pruned_loss=0.05611, over 4928.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2357, pruned_loss=0.0449, over 951951.92 frames. ], batch size: 33, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:10:21,697 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165551.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:10:22,431 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.97 vs. limit=5.0 +2023-04-28 05:10:52,970 INFO [finetune.py:976] (0/7) Epoch 29, batch 5200, loss[loss=0.1666, simple_loss=0.2461, pruned_loss=0.04354, over 4918.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2382, pruned_loss=0.04552, over 953230.22 frames. ], batch size: 36, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:10:55,996 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.013e+02 1.620e+02 1.941e+02 2.269e+02 3.767e+02, threshold=3.883e+02, percent-clipped=2.0 +2023-04-28 05:11:28,579 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2768, 1.4897, 1.7217, 1.8462, 1.7400, 1.7898, 1.7619, 1.7478], + device='cuda:0'), covar=tensor([0.3847, 0.5029, 0.4013, 0.4133, 0.5019, 0.6506, 0.4657, 0.4326], + device='cuda:0'), in_proj_covar=tensor([0.0344, 0.0374, 0.0332, 0.0344, 0.0352, 0.0394, 0.0363, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 05:11:40,321 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165612.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:11:59,706 INFO [finetune.py:976] (0/7) Epoch 29, batch 5250, loss[loss=0.2019, simple_loss=0.2787, pruned_loss=0.06252, over 4746.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2404, pruned_loss=0.0461, over 952413.64 frames. ], batch size: 59, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:12:31,662 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.93 vs. 
limit=2.0 +2023-04-28 05:13:06,884 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.5561, 1.4031, 1.4861, 1.1629, 1.5433, 1.2892, 1.8337, 1.3449], + device='cuda:0'), covar=tensor([0.3354, 0.1948, 0.4531, 0.2282, 0.1361, 0.2014, 0.1519, 0.4634], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0356, 0.0425, 0.0353, 0.0386, 0.0377, 0.0371, 0.0426], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:13:07,359 INFO [finetune.py:976] (0/7) Epoch 29, batch 5300, loss[loss=0.2327, simple_loss=0.2898, pruned_loss=0.08781, over 4861.00 frames. ], tot_loss[loss=0.1686, simple_loss=0.2424, pruned_loss=0.04742, over 953143.38 frames. ], batch size: 34, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:13:16,015 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.838e+01 1.557e+02 1.826e+02 2.262e+02 6.421e+02, threshold=3.651e+02, percent-clipped=2.0 +2023-04-28 05:13:49,006 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.40 vs. limit=2.0 +2023-04-28 05:14:12,923 INFO [finetune.py:976] (0/7) Epoch 29, batch 5350, loss[loss=0.225, simple_loss=0.2825, pruned_loss=0.0837, over 4270.00 frames. ], tot_loss[loss=0.168, simple_loss=0.2423, pruned_loss=0.04681, over 951450.45 frames. ], batch size: 66, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:14:43,456 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165750.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:15:14,012 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.85 vs. limit=2.0 +2023-04-28 05:15:21,307 INFO [finetune.py:976] (0/7) Epoch 29, batch 5400, loss[loss=0.1392, simple_loss=0.2124, pruned_loss=0.033, over 4820.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.24, pruned_loss=0.04623, over 953360.24 frames. ], batch size: 41, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:15:22,640 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7278, 1.5469, 0.8895, 1.4341, 1.7014, 1.5613, 1.4477, 1.5518], + device='cuda:0'), covar=tensor([0.0463, 0.0376, 0.0328, 0.0529, 0.0258, 0.0485, 0.0465, 0.0542], + device='cuda:0'), in_proj_covar=tensor([0.0028, 0.0023, 0.0022, 0.0028, 0.0019, 0.0028, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0047, 0.0039, 0.0054, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 05:15:24,333 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=165779.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:15:24,845 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.500e+02 1.780e+02 2.167e+02 4.402e+02, threshold=3.560e+02, percent-clipped=1.0 +2023-04-28 05:15:35,291 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.09 vs. limit=2.0 +2023-04-28 05:15:55,101 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=165803.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:16:07,206 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165811.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:16:21,414 INFO [finetune.py:976] (0/7) Epoch 29, batch 5450, loss[loss=0.1292, simple_loss=0.2064, pruned_loss=0.02604, over 4883.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2363, pruned_loss=0.04469, over 954792.57 frames. 
], batch size: 31, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:16:22,707 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=165827.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:16:48,223 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=165864.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:16:55,277 INFO [finetune.py:976] (0/7) Epoch 29, batch 5500, loss[loss=0.1291, simple_loss=0.1987, pruned_loss=0.02979, over 4831.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2335, pruned_loss=0.04399, over 957135.97 frames. ], batch size: 30, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:16:58,258 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.287e+01 1.513e+02 1.807e+02 2.218e+02 5.669e+02, threshold=3.614e+02, percent-clipped=2.0 +2023-04-28 05:17:16,262 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=165907.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:17:29,076 INFO [finetune.py:976] (0/7) Epoch 29, batch 5550, loss[loss=0.1739, simple_loss=0.2458, pruned_loss=0.05097, over 4829.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2367, pruned_loss=0.04537, over 958168.83 frames. ], batch size: 30, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:18:28,715 INFO [finetune.py:976] (0/7) Epoch 29, batch 5600, loss[loss=0.2109, simple_loss=0.2753, pruned_loss=0.07323, over 4834.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2399, pruned_loss=0.04596, over 956807.89 frames. ], batch size: 33, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:18:37,370 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.927e+01 1.529e+02 1.829e+02 2.121e+02 4.998e+02, threshold=3.659e+02, percent-clipped=4.0 +2023-04-28 05:18:48,677 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9734, 2.4149, 1.0646, 1.3743, 1.8758, 1.2377, 3.3124, 1.5808], + device='cuda:0'), covar=tensor([0.0646, 0.0545, 0.0719, 0.1166, 0.0446, 0.0986, 0.0210, 0.0593], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 05:18:59,082 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-166000.pt +2023-04-28 05:19:06,407 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5118, 1.5523, 4.1570, 3.9580, 3.6661, 4.0078, 3.9563, 3.6462], + device='cuda:0'), covar=tensor([0.7192, 0.5088, 0.1075, 0.1509, 0.1060, 0.1755, 0.1276, 0.1646], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0309, 0.0407, 0.0410, 0.0351, 0.0414, 0.0317, 0.0363], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:19:32,708 INFO [finetune.py:976] (0/7) Epoch 29, batch 5650, loss[loss=0.2004, simple_loss=0.2659, pruned_loss=0.06747, over 4893.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.242, pruned_loss=0.04618, over 959079.73 frames. ], batch size: 32, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:20:11,612 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166054.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 05:20:33,889 INFO [finetune.py:976] (0/7) Epoch 29, batch 5700, loss[loss=0.113, simple_loss=0.1874, pruned_loss=0.01933, over 4091.00 frames. 
], tot_loss[loss=0.1653, simple_loss=0.2388, pruned_loss=0.04587, over 941033.72 frames. ], batch size: 17, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:20:42,336 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 6.981e+01 1.433e+02 1.713e+02 2.148e+02 4.581e+02, threshold=3.425e+02, percent-clipped=1.0 +2023-04-28 05:20:43,699 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([3.1118, 2.7992, 3.0175, 3.6190, 2.9233, 2.5951, 2.6836, 3.0779], + device='cuda:0'), covar=tensor([0.2725, 0.2442, 0.1446, 0.1958, 0.2093, 0.2251, 0.3064, 0.1507], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0247, 0.0229, 0.0316, 0.0223, 0.0237, 0.0230, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 05:20:44,223 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.0209, 4.0220, 3.0138, 4.6351, 4.1002, 3.9321, 2.1792, 3.9221], + device='cuda:0'), covar=tensor([0.1657, 0.1183, 0.2995, 0.1499, 0.3350, 0.1819, 0.5122, 0.2644], + device='cuda:0'), in_proj_covar=tensor([0.0246, 0.0218, 0.0252, 0.0305, 0.0300, 0.0250, 0.0274, 0.0275], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 05:20:47,393 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.25 vs. limit=2.0 +2023-04-28 05:20:54,659 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-28 05:21:07,610 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-29.pt +2023-04-28 05:21:19,172 INFO [finetune.py:976] (0/7) Epoch 30, batch 0, loss[loss=0.1672, simple_loss=0.242, pruned_loss=0.04623, over 4857.00 frames. ], tot_loss[loss=0.1672, simple_loss=0.242, pruned_loss=0.04623, over 4857.00 frames. ], batch size: 34, lr: 2.82e-03, grad_scale: 16.0 +2023-04-28 05:21:19,174 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-28 05:21:20,959 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7872, 1.0818, 1.7964, 2.2598, 1.8592, 1.7069, 1.7369, 1.7496], + device='cuda:0'), covar=tensor([0.4553, 0.7224, 0.6858, 0.5822, 0.6117, 0.8182, 0.8598, 0.9004], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0427, 0.0523, 0.0511, 0.0477, 0.0517, 0.0519, 0.0533], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:21:26,363 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3073, 1.6050, 1.8653, 2.0057, 1.9836, 2.0070, 1.8715, 1.9057], + device='cuda:0'), covar=tensor([0.3715, 0.5178, 0.4358, 0.4400, 0.4988, 0.6615, 0.5498, 0.4888], + device='cuda:0'), in_proj_covar=tensor([0.0347, 0.0377, 0.0333, 0.0346, 0.0354, 0.0396, 0.0365, 0.0336], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 05:21:35,743 INFO [finetune.py:1010] (0/7) Epoch 30, validation: loss=0.1551, simple_loss=0.2236, pruned_loss=0.04334, over 2265189.00 frames. 
+2023-04-28 05:21:35,745 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-28 05:21:37,612 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166106.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:21:48,577 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166115.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 05:22:23,303 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.73 vs. limit=2.0 +2023-04-28 05:22:31,241 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166147.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:22:35,225 INFO [finetune.py:976] (0/7) Epoch 30, batch 50, loss[loss=0.1756, simple_loss=0.2513, pruned_loss=0.04996, over 4817.00 frames. ], tot_loss[loss=0.1729, simple_loss=0.246, pruned_loss=0.04989, over 217456.56 frames. ], batch size: 38, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:22:41,887 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.88 vs. limit=2.0 +2023-04-28 05:22:42,442 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.63 vs. limit=2.0 +2023-04-28 05:22:46,130 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166159.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:23:14,383 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 7.993e+01 1.354e+02 1.707e+02 2.159e+02 3.038e+02, threshold=3.414e+02, percent-clipped=0.0 +2023-04-28 05:23:44,643 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8578, 1.5729, 1.7478, 2.1335, 2.2021, 1.7946, 1.4278, 1.9132], + device='cuda:0'), covar=tensor([0.0751, 0.1242, 0.0781, 0.0546, 0.0606, 0.0747, 0.0787, 0.0531], + device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0202, 0.0183, 0.0170, 0.0177, 0.0177, 0.0149, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:23:47,366 INFO [finetune.py:976] (0/7) Epoch 30, batch 100, loss[loss=0.16, simple_loss=0.2288, pruned_loss=0.04561, over 4781.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2355, pruned_loss=0.04533, over 379523.39 frames. ], batch size: 26, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:23:50,447 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166207.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:23:56,837 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166208.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:24:41,726 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.12 vs. limit=2.0 +2023-04-28 05:24:53,828 INFO [finetune.py:976] (0/7) Epoch 30, batch 150, loss[loss=0.1726, simple_loss=0.2293, pruned_loss=0.05798, over 4134.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2318, pruned_loss=0.04451, over 507689.35 frames. ], batch size: 18, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:24:55,117 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=166255.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:25:28,830 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.799e+01 1.515e+02 1.784e+02 2.130e+02 3.221e+02, threshold=3.568e+02, percent-clipped=0.0 +2023-04-28 05:25:59,857 INFO [finetune.py:976] (0/7) Epoch 30, batch 200, loss[loss=0.125, simple_loss=0.2073, pruned_loss=0.02138, over 4771.00 frames. 
], tot_loss[loss=0.16, simple_loss=0.231, pruned_loss=0.04448, over 606689.74 frames. ], batch size: 26, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:26:02,454 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166307.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:26:40,368 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.41 vs. limit=2.0 +2023-04-28 05:27:02,967 INFO [finetune.py:976] (0/7) Epoch 30, batch 250, loss[loss=0.1867, simple_loss=0.2683, pruned_loss=0.05257, over 4829.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2331, pruned_loss=0.04473, over 685202.72 frames. ], batch size: 40, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:27:08,085 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8163, 1.3544, 1.4830, 1.5276, 1.9116, 1.5650, 1.3630, 1.4209], + device='cuda:0'), covar=tensor([0.1383, 0.1378, 0.1863, 0.1279, 0.0822, 0.1302, 0.1644, 0.2111], + device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0310, 0.0350, 0.0288, 0.0327, 0.0307, 0.0302, 0.0379], + device='cuda:0'), out_proj_covar=tensor([6.4388e-05, 6.3513e-05, 7.3268e-05, 5.7472e-05, 6.6625e-05, 6.3545e-05, + 6.2383e-05, 8.0131e-05], device='cuda:0') +2023-04-28 05:27:19,829 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166368.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:27:20,486 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6837, 1.5373, 1.9269, 1.9613, 1.4529, 1.3259, 1.6408, 0.9440], + device='cuda:0'), covar=tensor([0.0454, 0.0719, 0.0392, 0.0597, 0.0690, 0.1122, 0.0538, 0.0568], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0067, 0.0065, 0.0069, 0.0076, 0.0095, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:27:38,574 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.540e+01 1.580e+02 1.903e+02 2.256e+02 3.495e+02, threshold=3.807e+02, percent-clipped=0.0 +2023-04-28 05:28:02,277 INFO [finetune.py:976] (0/7) Epoch 30, batch 300, loss[loss=0.1841, simple_loss=0.2438, pruned_loss=0.06217, over 4884.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.236, pruned_loss=0.04534, over 744969.32 frames. ], batch size: 32, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:28:07,537 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166406.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:28:09,972 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166410.0, num_to_drop=1, layers_to_drop={3} +2023-04-28 05:29:01,972 INFO [finetune.py:976] (0/7) Epoch 30, batch 350, loss[loss=0.1787, simple_loss=0.2475, pruned_loss=0.05499, over 4861.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2385, pruned_loss=0.0464, over 790303.31 frames. 
], batch size: 31, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:29:02,685 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=166454.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:29:11,880 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166459.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:29:31,635 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.632e+01 1.563e+02 1.883e+02 2.303e+02 4.800e+02, threshold=3.766e+02, percent-clipped=2.0 +2023-04-28 05:29:32,377 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166481.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:29:46,146 INFO [finetune.py:976] (0/7) Epoch 30, batch 400, loss[loss=0.2007, simple_loss=0.2655, pruned_loss=0.06794, over 4892.00 frames. ], tot_loss[loss=0.1678, simple_loss=0.2414, pruned_loss=0.04713, over 828754.90 frames. ], batch size: 35, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:29:46,215 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166503.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:29:49,093 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=166507.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:29:50,822 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166509.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:29:50,900 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.15 vs. limit=2.0 +2023-04-28 05:30:06,151 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166531.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:30:13,237 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166542.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:30:19,818 INFO [finetune.py:976] (0/7) Epoch 30, batch 450, loss[loss=0.1816, simple_loss=0.2632, pruned_loss=0.04998, over 4907.00 frames. ], tot_loss[loss=0.1666, simple_loss=0.2401, pruned_loss=0.04656, over 858100.47 frames. ], batch size: 36, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:30:31,301 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166570.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:30:37,700 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2831, 1.7225, 2.1106, 2.5705, 2.0257, 1.6470, 1.4020, 1.9703], + device='cuda:0'), covar=tensor([0.2695, 0.3001, 0.1599, 0.1886, 0.2345, 0.2553, 0.3867, 0.1773], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0248, 0.0229, 0.0316, 0.0223, 0.0238, 0.0230, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 05:30:39,258 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.427e+02 1.810e+02 2.217e+02 8.020e+02, threshold=3.620e+02, percent-clipped=2.0 +2023-04-28 05:30:47,077 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166592.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:30:53,781 INFO [finetune.py:976] (0/7) Epoch 30, batch 500, loss[loss=0.1465, simple_loss=0.2171, pruned_loss=0.03796, over 4764.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2376, pruned_loss=0.04583, over 880511.40 frames. 
], batch size: 28, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:31:07,079 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4589, 1.2277, 4.0878, 3.8752, 3.6133, 3.9275, 3.8408, 3.6563], + device='cuda:0'), covar=tensor([0.7356, 0.5937, 0.1115, 0.1660, 0.1167, 0.2063, 0.1866, 0.1505], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0311, 0.0409, 0.0412, 0.0353, 0.0416, 0.0319, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:31:27,593 INFO [finetune.py:976] (0/7) Epoch 30, batch 550, loss[loss=0.177, simple_loss=0.244, pruned_loss=0.055, over 4829.00 frames. ], tot_loss[loss=0.1622, simple_loss=0.2347, pruned_loss=0.04485, over 897216.46 frames. ], batch size: 30, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:31:34,195 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166663.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:31:45,971 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.022e+02 1.489e+02 1.745e+02 2.160e+02 4.947e+02, threshold=3.489e+02, percent-clipped=2.0 +2023-04-28 05:32:01,358 INFO [finetune.py:976] (0/7) Epoch 30, batch 600, loss[loss=0.1537, simple_loss=0.2343, pruned_loss=0.03651, over 4783.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2349, pruned_loss=0.04418, over 912428.00 frames. ], batch size: 29, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:32:05,744 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166710.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 05:32:07,104 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.38 vs. limit=2.0 +2023-04-28 05:32:23,043 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166735.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:32:34,881 INFO [finetune.py:976] (0/7) Epoch 30, batch 650, loss[loss=0.1569, simple_loss=0.2201, pruned_loss=0.04689, over 4902.00 frames. ], tot_loss[loss=0.164, simple_loss=0.2381, pruned_loss=0.0449, over 921652.16 frames. ], batch size: 35, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:32:37,944 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=166758.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 05:32:47,100 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166764.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:32:55,015 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8188, 1.3069, 1.8879, 2.2733, 1.8825, 1.7674, 1.8108, 1.7780], + device='cuda:0'), covar=tensor([0.4620, 0.7271, 0.6527, 0.5484, 0.6105, 0.8442, 0.8422, 0.9523], + device='cuda:0'), in_proj_covar=tensor([0.0449, 0.0426, 0.0523, 0.0510, 0.0476, 0.0516, 0.0519, 0.0532], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:33:07,160 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.129e+02 1.523e+02 1.858e+02 2.285e+02 4.759e+02, threshold=3.715e+02, percent-clipped=1.0 +2023-04-28 05:33:29,727 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166796.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:33:39,478 INFO [finetune.py:976] (0/7) Epoch 30, batch 700, loss[loss=0.1821, simple_loss=0.2564, pruned_loss=0.05389, over 4832.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2391, pruned_loss=0.0452, over 926666.67 frames. 
], batch size: 49, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:33:39,580 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166803.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:34:03,936 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166825.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:34:22,180 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166837.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:34:37,355 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=166851.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:34:43,122 INFO [finetune.py:976] (0/7) Epoch 30, batch 750, loss[loss=0.1343, simple_loss=0.2202, pruned_loss=0.0242, over 4857.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2396, pruned_loss=0.04509, over 934802.18 frames. ], batch size: 44, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:34:51,920 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166865.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:35:11,848 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.075e+02 1.441e+02 1.685e+02 2.007e+02 5.927e+02, threshold=3.370e+02, percent-clipped=1.0 +2023-04-28 05:35:21,895 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=166887.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:35:44,637 INFO [finetune.py:976] (0/7) Epoch 30, batch 800, loss[loss=0.1843, simple_loss=0.2474, pruned_loss=0.06065, over 4775.00 frames. ], tot_loss[loss=0.1658, simple_loss=0.2405, pruned_loss=0.0455, over 939840.52 frames. ], batch size: 29, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:36:17,511 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=166929.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:36:38,366 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.5206, 1.3856, 1.4216, 1.0542, 1.4253, 1.1568, 1.6917, 1.3402], + device='cuda:0'), covar=tensor([0.3128, 0.1782, 0.4247, 0.2516, 0.1401, 0.2117, 0.1550, 0.4301], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0356, 0.0427, 0.0353, 0.0388, 0.0377, 0.0373, 0.0427], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:36:49,753 INFO [finetune.py:976] (0/7) Epoch 30, batch 850, loss[loss=0.1739, simple_loss=0.2451, pruned_loss=0.0514, over 4749.00 frames. ], tot_loss[loss=0.1636, simple_loss=0.2377, pruned_loss=0.0448, over 944465.69 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:36:56,523 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.05 vs. 
limit=5.0 +2023-04-28 05:36:59,958 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=166963.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:37:11,769 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5908, 2.9549, 0.9267, 1.4612, 2.2582, 1.5821, 4.2220, 1.7918], + device='cuda:0'), covar=tensor([0.0594, 0.0801, 0.0901, 0.1339, 0.0535, 0.0997, 0.0212, 0.0692], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0051], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 05:37:20,967 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.032e+02 1.548e+02 1.839e+02 2.172e+02 6.436e+02, threshold=3.678e+02, percent-clipped=1.0 +2023-04-28 05:37:33,075 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=166990.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:37:42,685 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.0022, 0.6495, 0.8699, 0.7770, 1.0718, 0.9517, 0.8904, 0.8560], + device='cuda:0'), covar=tensor([0.1245, 0.1381, 0.1385, 0.1271, 0.0853, 0.1155, 0.1208, 0.1738], + device='cuda:0'), in_proj_covar=tensor([0.0316, 0.0309, 0.0350, 0.0287, 0.0325, 0.0305, 0.0301, 0.0378], + device='cuda:0'), out_proj_covar=tensor([6.4217e-05, 6.3167e-05, 7.3301e-05, 5.7265e-05, 6.6175e-05, 6.3244e-05, + 6.1999e-05, 7.9834e-05], device='cuda:0') +2023-04-28 05:37:47,445 INFO [finetune.py:976] (0/7) Epoch 30, batch 900, loss[loss=0.1554, simple_loss=0.2283, pruned_loss=0.04127, over 4900.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2352, pruned_loss=0.04421, over 944055.34 frames. ], batch size: 43, lr: 2.81e-03, grad_scale: 16.0 +2023-04-28 05:37:52,358 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=167011.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:38:21,329 INFO [finetune.py:976] (0/7) Epoch 30, batch 950, loss[loss=0.1657, simple_loss=0.2356, pruned_loss=0.04794, over 4922.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2349, pruned_loss=0.04458, over 946981.22 frames. ], batch size: 37, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:38:33,389 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0044, 2.4700, 2.1497, 2.4030, 1.8011, 2.1832, 1.9737, 1.5723], + device='cuda:0'), covar=tensor([0.1990, 0.1244, 0.0781, 0.1092, 0.3106, 0.1167, 0.2022, 0.2605], + device='cuda:0'), in_proj_covar=tensor([0.0283, 0.0303, 0.0218, 0.0276, 0.0312, 0.0255, 0.0250, 0.0263], + device='cuda:0'), out_proj_covar=tensor([1.1285e-04, 1.1907e-04, 8.5574e-05, 1.0836e-04, 1.2546e-04, 1.0028e-04, + 1.0060e-04, 1.0363e-04], device='cuda:0') +2023-04-28 05:38:38,159 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.627e+02 1.847e+02 2.138e+02 3.460e+02, threshold=3.694e+02, percent-clipped=0.0 +2023-04-28 05:38:45,982 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167091.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:38:52,932 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167100.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:38:55,141 INFO [finetune.py:976] (0/7) Epoch 30, batch 1000, loss[loss=0.2119, simple_loss=0.2885, pruned_loss=0.06768, over 4827.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2365, pruned_loss=0.04487, over 948769.90 frames. 
], batch size: 39, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:38:55,864 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167104.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:39:06,115 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167120.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:39:17,577 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167137.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:39:28,662 INFO [finetune.py:976] (0/7) Epoch 30, batch 1050, loss[loss=0.1754, simple_loss=0.2449, pruned_loss=0.05291, over 4819.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.239, pruned_loss=0.04532, over 949419.80 frames. ], batch size: 39, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:39:29,876 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3499, 2.2317, 1.8165, 1.9132, 2.3044, 1.9154, 2.7340, 1.6085], + device='cuda:0'), covar=tensor([0.3583, 0.1934, 0.4407, 0.3177, 0.1716, 0.2442, 0.1583, 0.4395], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0355, 0.0425, 0.0353, 0.0386, 0.0375, 0.0371, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:39:34,162 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167161.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:39:36,562 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167165.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:39:36,604 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167165.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:39:46,131 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.104e+02 1.567e+02 1.873e+02 2.278e+02 3.738e+02, threshold=3.746e+02, percent-clipped=2.0 +2023-04-28 05:39:49,157 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=167185.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:39:50,865 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167187.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:40:01,670 INFO [finetune.py:976] (0/7) Epoch 30, batch 1100, loss[loss=0.1575, simple_loss=0.2391, pruned_loss=0.03795, over 4814.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2401, pruned_loss=0.04547, over 950553.00 frames. 
], batch size: 51, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:40:08,805 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=167213.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:40:08,873 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9971, 1.0164, 1.2007, 1.1486, 0.9929, 0.8936, 0.9613, 0.5096], + device='cuda:0'), covar=tensor([0.0497, 0.0511, 0.0418, 0.0522, 0.0615, 0.1159, 0.0453, 0.0587], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0067, 0.0065, 0.0069, 0.0076, 0.0095, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:40:15,591 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4673, 1.9669, 2.3858, 2.9308, 2.3736, 1.8573, 1.9621, 2.2472], + device='cuda:0'), covar=tensor([0.2967, 0.2882, 0.1479, 0.2423, 0.2758, 0.2469, 0.3679, 0.1977], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0246, 0.0228, 0.0315, 0.0222, 0.0236, 0.0229, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 05:40:20,281 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9875, 4.3654, 1.0895, 2.2615, 2.8505, 3.0750, 2.5646, 1.0316], + device='cuda:0'), covar=tensor([0.1382, 0.0912, 0.2046, 0.1178, 0.0904, 0.1009, 0.1466, 0.2255], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0240, 0.0137, 0.0121, 0.0133, 0.0154, 0.0119, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:40:22,662 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=167235.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:40:34,490 INFO [finetune.py:976] (0/7) Epoch 30, batch 1150, loss[loss=0.1684, simple_loss=0.2455, pruned_loss=0.04563, over 4732.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2405, pruned_loss=0.04569, over 950004.82 frames. ], batch size: 54, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:41:03,288 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.007e+02 1.590e+02 1.832e+02 2.168e+02 3.581e+02, threshold=3.663e+02, percent-clipped=0.0 +2023-04-28 05:41:11,483 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167285.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:41:33,519 INFO [finetune.py:976] (0/7) Epoch 30, batch 1200, loss[loss=0.1629, simple_loss=0.239, pruned_loss=0.04346, over 4246.00 frames. ], tot_loss[loss=0.1664, simple_loss=0.2406, pruned_loss=0.04605, over 951793.89 frames. ], batch size: 66, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:41:43,239 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.83 vs. limit=2.0 +2023-04-28 05:42:37,506 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.18 vs. limit=2.0 +2023-04-28 05:42:37,938 INFO [finetune.py:976] (0/7) Epoch 30, batch 1250, loss[loss=0.153, simple_loss=0.2307, pruned_loss=0.0377, over 4766.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.239, pruned_loss=0.04566, over 954063.03 frames. ], batch size: 27, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:42:38,158 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.42 vs. 
limit=2.0 +2023-04-28 05:42:39,885 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5725, 1.2552, 4.1466, 3.8849, 3.5567, 3.8759, 3.8298, 3.6404], + device='cuda:0'), covar=tensor([0.7114, 0.5859, 0.1171, 0.1772, 0.1251, 0.1866, 0.1840, 0.1529], + device='cuda:0'), in_proj_covar=tensor([0.0309, 0.0310, 0.0407, 0.0409, 0.0352, 0.0414, 0.0316, 0.0364], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:43:19,233 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 7.674e+01 1.528e+02 1.825e+02 2.243e+02 4.898e+02, threshold=3.650e+02, percent-clipped=3.0 +2023-04-28 05:43:32,143 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167391.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:43:42,858 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1770, 2.6085, 1.0757, 1.4478, 1.9625, 1.2899, 3.5036, 1.8037], + device='cuda:0'), covar=tensor([0.0666, 0.0588, 0.0767, 0.1236, 0.0511, 0.1000, 0.0194, 0.0629], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0073, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 05:43:45,351 INFO [finetune.py:976] (0/7) Epoch 30, batch 1300, loss[loss=0.1656, simple_loss=0.2393, pruned_loss=0.04596, over 4899.00 frames. ], tot_loss[loss=0.162, simple_loss=0.2356, pruned_loss=0.04417, over 955727.27 frames. ], batch size: 32, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:44:12,873 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167420.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:44:25,761 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167432.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:44:35,810 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=167439.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:44:45,733 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4121, 1.4056, 1.6695, 1.7115, 1.2887, 1.1795, 1.4658, 0.9666], + device='cuda:0'), covar=tensor([0.0558, 0.0455, 0.0411, 0.0375, 0.0569, 0.0776, 0.0419, 0.0502], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0067, 0.0065, 0.0068, 0.0075, 0.0094, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:44:56,435 INFO [finetune.py:976] (0/7) Epoch 30, batch 1350, loss[loss=0.2206, simple_loss=0.2877, pruned_loss=0.0768, over 4052.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2366, pruned_loss=0.04454, over 955530.03 frames. 
], batch size: 65, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:44:58,337 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167456.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:45:00,776 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167460.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:45:12,123 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=167468.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:45:25,590 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.814e+01 1.503e+02 1.795e+02 2.104e+02 5.763e+02, threshold=3.590e+02, percent-clipped=1.0 +2023-04-28 05:45:34,218 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167493.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:45:41,215 INFO [finetune.py:976] (0/7) Epoch 30, batch 1400, loss[loss=0.1326, simple_loss=0.2107, pruned_loss=0.02724, over 4790.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2388, pruned_loss=0.04435, over 955768.29 frames. ], batch size: 25, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:46:14,263 INFO [finetune.py:976] (0/7) Epoch 30, batch 1450, loss[loss=0.1687, simple_loss=0.255, pruned_loss=0.04119, over 4846.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2395, pruned_loss=0.04417, over 956761.65 frames. ], batch size: 44, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:46:45,810 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.81 vs. limit=5.0 +2023-04-28 05:46:52,053 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.057e+02 1.458e+02 1.734e+02 2.083e+02 3.176e+02, threshold=3.469e+02, percent-clipped=0.0 +2023-04-28 05:46:52,214 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2969, 1.7466, 2.2276, 2.5996, 2.2406, 1.6856, 1.4475, 2.0186], + device='cuda:0'), covar=tensor([0.3051, 0.3028, 0.1507, 0.2113, 0.2531, 0.2561, 0.3982, 0.1776], + device='cuda:0'), in_proj_covar=tensor([0.0291, 0.0246, 0.0228, 0.0314, 0.0223, 0.0236, 0.0229, 0.0185], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 05:46:55,208 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167585.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:46:55,254 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.4072, 1.6553, 1.5842, 2.2848, 2.4166, 1.9698, 1.9335, 1.6888], + device='cuda:0'), covar=tensor([0.1676, 0.1882, 0.2067, 0.1651, 0.1148, 0.2066, 0.2343, 0.2463], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0307, 0.0350, 0.0285, 0.0323, 0.0305, 0.0301, 0.0376], + device='cuda:0'), out_proj_covar=tensor([6.4037e-05, 6.2742e-05, 7.3183e-05, 5.6780e-05, 6.5807e-05, 6.3251e-05, + 6.2039e-05, 7.9468e-05], device='cuda:0') +2023-04-28 05:47:03,065 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2002, 1.4920, 1.3404, 1.7378, 1.6480, 1.8265, 1.3931, 3.1299], + device='cuda:0'), covar=tensor([0.0657, 0.0798, 0.0794, 0.1199, 0.0619, 0.0441, 0.0716, 0.0175], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 05:47:06,140 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8208, 1.5132, 2.0361, 2.1380, 1.5082, 1.4266, 1.6699, 0.9971], + device='cuda:0'), 
covar=tensor([0.0505, 0.0685, 0.0370, 0.0606, 0.0623, 0.1113, 0.0489, 0.0670], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0067, 0.0065, 0.0068, 0.0075, 0.0094, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:47:21,141 INFO [finetune.py:976] (0/7) Epoch 30, batch 1500, loss[loss=0.1588, simple_loss=0.2495, pruned_loss=0.03405, over 4833.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2401, pruned_loss=0.04444, over 957072.22 frames. ], batch size: 47, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:47:56,714 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=167633.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:48:20,794 INFO [finetune.py:976] (0/7) Epoch 30, batch 1550, loss[loss=0.1577, simple_loss=0.2314, pruned_loss=0.04203, over 4902.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.2391, pruned_loss=0.04377, over 958577.61 frames. ], batch size: 43, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:48:40,251 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.357e+01 1.558e+02 1.802e+02 2.131e+02 3.986e+02, threshold=3.604e+02, percent-clipped=1.0 +2023-04-28 05:48:50,181 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4973, 1.4183, 1.7892, 1.7692, 1.3262, 1.2327, 1.4465, 0.8172], + device='cuda:0'), covar=tensor([0.0501, 0.0522, 0.0309, 0.0490, 0.0673, 0.0974, 0.0441, 0.0530], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0065, 0.0068, 0.0075, 0.0094, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:48:54,271 INFO [finetune.py:976] (0/7) Epoch 30, batch 1600, loss[loss=0.1347, simple_loss=0.2051, pruned_loss=0.03218, over 4190.00 frames. ], tot_loss[loss=0.1631, simple_loss=0.2382, pruned_loss=0.04396, over 958339.48 frames. ], batch size: 65, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:49:05,603 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167719.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:49:09,759 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.9804, 1.9170, 1.8993, 1.7240, 2.1518, 1.7102, 2.6626, 1.6719], + device='cuda:0'), covar=tensor([0.3420, 0.1964, 0.4388, 0.2642, 0.1328, 0.2309, 0.1142, 0.4154], + device='cuda:0'), in_proj_covar=tensor([0.0337, 0.0352, 0.0422, 0.0349, 0.0382, 0.0372, 0.0369, 0.0422], + device='cuda:0'), out_proj_covar=tensor([9.9523e-05, 1.0476e-04, 1.2739e-04, 1.0408e-04, 1.1300e-04, 1.1043e-04, + 1.0744e-04, 1.2661e-04], device='cuda:0') +2023-04-28 05:49:09,766 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5832, 1.4213, 1.8516, 1.8119, 1.3839, 1.3151, 1.4671, 0.8731], + device='cuda:0'), covar=tensor([0.0477, 0.0533, 0.0317, 0.0413, 0.0677, 0.1090, 0.0488, 0.0541], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0064, 0.0068, 0.0075, 0.0094, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:49:28,178 INFO [finetune.py:976] (0/7) Epoch 30, batch 1650, loss[loss=0.1632, simple_loss=0.2405, pruned_loss=0.04295, over 4836.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2355, pruned_loss=0.0434, over 957628.54 frames. 
], batch size: 30, lr: 2.81e-03, grad_scale: 32.0 +2023-04-28 05:49:30,075 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167756.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:49:33,002 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=167760.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:49:37,263 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6669, 1.4652, 1.8298, 1.8712, 1.4668, 1.3892, 1.4760, 0.9380], + device='cuda:0'), covar=tensor([0.0443, 0.0673, 0.0322, 0.0556, 0.0706, 0.1037, 0.0505, 0.0481], + device='cuda:0'), in_proj_covar=tensor([0.0070, 0.0067, 0.0064, 0.0068, 0.0075, 0.0094, 0.0072, 0.0061], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:49:37,846 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=167767.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:49:44,467 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4840, 1.0929, 1.2622, 1.1888, 1.5551, 1.3360, 1.1752, 1.2626], + device='cuda:0'), covar=tensor([0.1460, 0.1141, 0.1605, 0.1227, 0.0701, 0.1251, 0.1554, 0.1949], + device='cuda:0'), in_proj_covar=tensor([0.0313, 0.0306, 0.0348, 0.0284, 0.0322, 0.0303, 0.0299, 0.0374], + device='cuda:0'), out_proj_covar=tensor([6.3665e-05, 6.2395e-05, 7.2784e-05, 5.6691e-05, 6.5560e-05, 6.2765e-05, + 6.1550e-05, 7.8969e-05], device='cuda:0') +2023-04-28 05:49:47,068 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.070e+02 1.494e+02 1.761e+02 2.054e+02 5.504e+02, threshold=3.522e+02, percent-clipped=6.0 +2023-04-28 05:49:47,189 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167780.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:49:52,931 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=167788.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:50:02,177 INFO [finetune.py:976] (0/7) Epoch 30, batch 1700, loss[loss=0.1667, simple_loss=0.2364, pruned_loss=0.04849, over 4734.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2329, pruned_loss=0.04276, over 956263.23 frames. ], batch size: 59, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:50:02,849 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=167804.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:50:04,193 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.61 vs. limit=5.0 +2023-04-28 05:50:05,281 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=167808.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:50:08,872 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3828, 1.7287, 1.5344, 1.9620, 1.9762, 2.1005, 1.6316, 3.8505], + device='cuda:0'), covar=tensor([0.0581, 0.0706, 0.0749, 0.1101, 0.0538, 0.0577, 0.0672, 0.0155], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0013, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 05:50:18,854 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=167828.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:50:25,220 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.81 vs. 
limit=2.0 +2023-04-28 05:50:31,930 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.46 vs. limit=2.0 +2023-04-28 05:50:35,264 INFO [finetune.py:976] (0/7) Epoch 30, batch 1750, loss[loss=0.1425, simple_loss=0.2001, pruned_loss=0.04243, over 4212.00 frames. ], tot_loss[loss=0.161, simple_loss=0.2351, pruned_loss=0.04344, over 956379.37 frames. ], batch size: 18, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:50:53,217 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.596e+01 1.560e+02 1.890e+02 2.217e+02 7.846e+02, threshold=3.780e+02, percent-clipped=0.0 +2023-04-28 05:51:08,163 INFO [finetune.py:976] (0/7) Epoch 30, batch 1800, loss[loss=0.1562, simple_loss=0.2227, pruned_loss=0.04483, over 4233.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2386, pruned_loss=0.0446, over 955627.75 frames. ], batch size: 18, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:51:41,397 INFO [finetune.py:976] (0/7) Epoch 30, batch 1850, loss[loss=0.1582, simple_loss=0.2331, pruned_loss=0.04165, over 4906.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2408, pruned_loss=0.04565, over 955272.34 frames. ], batch size: 36, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:52:04,613 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.089e+02 1.506e+02 1.821e+02 2.216e+02 4.142e+02, threshold=3.642e+02, percent-clipped=2.0 +2023-04-28 05:52:23,794 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-168000.pt +2023-04-28 05:52:26,789 INFO [finetune.py:976] (0/7) Epoch 30, batch 1900, loss[loss=0.1592, simple_loss=0.2318, pruned_loss=0.0433, over 4861.00 frames. ], tot_loss[loss=0.1663, simple_loss=0.2414, pruned_loss=0.04554, over 956050.04 frames. ], batch size: 31, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:52:38,805 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.84 vs. limit=5.0 +2023-04-28 05:53:03,997 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168035.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:53:31,845 INFO [finetune.py:976] (0/7) Epoch 30, batch 1950, loss[loss=0.1581, simple_loss=0.2263, pruned_loss=0.04499, over 4779.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2406, pruned_loss=0.04544, over 958041.86 frames. ], batch size: 29, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:53:56,245 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168075.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:54:04,670 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.011e+02 1.564e+02 1.911e+02 2.576e+02 9.918e+02, threshold=3.821e+02, percent-clipped=1.0 +2023-04-28 05:54:13,547 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168088.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:54:23,971 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168096.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 05:54:32,405 INFO [finetune.py:976] (0/7) Epoch 30, batch 2000, loss[loss=0.1427, simple_loss=0.2126, pruned_loss=0.03642, over 4928.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2375, pruned_loss=0.04458, over 957220.63 frames. 
], batch size: 33, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:54:55,111 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168123.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:54:55,772 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6458, 1.6053, 1.9974, 2.0023, 1.4882, 1.4549, 1.6528, 1.0677], + device='cuda:0'), covar=tensor([0.0511, 0.0628, 0.0339, 0.0609, 0.0750, 0.0957, 0.0598, 0.0581], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0067, 0.0065, 0.0068, 0.0075, 0.0094, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:54:56,981 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1842, 2.7212, 2.1637, 2.3230, 1.5266, 1.5729, 2.2171, 1.5014], + device='cuda:0'), covar=tensor([0.1614, 0.1461, 0.1321, 0.1511, 0.2238, 0.1889, 0.0950, 0.2043], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0210, 0.0170, 0.0205, 0.0201, 0.0188, 0.0157, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 05:55:13,707 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=168136.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:55:16,718 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.5824, 1.3682, 1.3474, 1.0959, 1.3796, 1.2200, 1.6311, 1.2781], + device='cuda:0'), covar=tensor([0.2909, 0.1536, 0.3777, 0.2188, 0.1427, 0.1792, 0.1599, 0.4236], + device='cuda:0'), in_proj_covar=tensor([0.0339, 0.0353, 0.0424, 0.0351, 0.0384, 0.0375, 0.0369, 0.0423], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:55:30,828 INFO [finetune.py:976] (0/7) Epoch 30, batch 2050, loss[loss=0.1654, simple_loss=0.2344, pruned_loss=0.04825, over 4926.00 frames. ], tot_loss[loss=0.1608, simple_loss=0.2342, pruned_loss=0.0437, over 955718.11 frames. ], batch size: 33, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:55:47,309 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.348e+01 1.497e+02 1.684e+02 2.075e+02 3.378e+02, threshold=3.368e+02, percent-clipped=0.0 +2023-04-28 05:56:04,271 INFO [finetune.py:976] (0/7) Epoch 30, batch 2100, loss[loss=0.1375, simple_loss=0.2224, pruned_loss=0.02632, over 4795.00 frames. ], tot_loss[loss=0.1611, simple_loss=0.234, pruned_loss=0.04412, over 955294.01 frames. ], batch size: 29, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:56:21,233 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1256, 2.5271, 0.8280, 1.5359, 1.4865, 1.9745, 1.6229, 0.8728], + device='cuda:0'), covar=tensor([0.1531, 0.1391, 0.1805, 0.1304, 0.1197, 0.0924, 0.1572, 0.1813], + device='cuda:0'), in_proj_covar=tensor([0.0118, 0.0239, 0.0136, 0.0121, 0.0133, 0.0154, 0.0118, 0.0118], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:56:23,083 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168233.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:56:28,924 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.20 vs. 
limit=5.0 +2023-04-28 05:56:32,571 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9857, 2.5021, 1.0169, 1.3528, 1.9624, 1.2121, 3.4098, 1.6017], + device='cuda:0'), covar=tensor([0.0745, 0.0646, 0.0846, 0.1369, 0.0534, 0.1085, 0.0259, 0.0686], + device='cuda:0'), in_proj_covar=tensor([0.0051, 0.0064, 0.0047, 0.0046, 0.0049, 0.0051, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0008], + device='cuda:0') +2023-04-28 05:56:37,575 INFO [finetune.py:976] (0/7) Epoch 30, batch 2150, loss[loss=0.2142, simple_loss=0.2955, pruned_loss=0.06642, over 4898.00 frames. ], tot_loss[loss=0.1635, simple_loss=0.2371, pruned_loss=0.04493, over 957067.78 frames. ], batch size: 43, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:56:51,519 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.1947, 4.1895, 3.0207, 4.9212, 4.1871, 4.2723, 1.6503, 4.2206], + device='cuda:0'), covar=tensor([0.1497, 0.0997, 0.3669, 0.0929, 0.3498, 0.1339, 0.5826, 0.2089], + device='cuda:0'), in_proj_covar=tensor([0.0248, 0.0217, 0.0253, 0.0302, 0.0300, 0.0249, 0.0274, 0.0273], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 05:56:54,473 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.048e+02 1.582e+02 1.868e+02 2.335e+02 3.831e+02, threshold=3.737e+02, percent-clipped=1.0 +2023-04-28 05:57:01,074 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1352, 1.6286, 2.1575, 2.5312, 2.2173, 2.0850, 2.1224, 2.0799], + device='cuda:0'), covar=tensor([0.4239, 0.6552, 0.5570, 0.5590, 0.5301, 0.7235, 0.8365, 0.7369], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0426, 0.0520, 0.0508, 0.0475, 0.0515, 0.0515, 0.0530], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 05:57:04,560 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=168294.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:57:08,201 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8420, 2.4672, 2.0583, 1.9798, 1.3639, 1.3913, 2.1713, 1.3285], + device='cuda:0'), covar=tensor([0.1621, 0.1298, 0.1324, 0.1527, 0.2193, 0.1888, 0.0895, 0.1951], + device='cuda:0'), in_proj_covar=tensor([0.0199, 0.0210, 0.0170, 0.0205, 0.0201, 0.0188, 0.0157, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 05:57:08,326 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.56 vs. limit=5.0 +2023-04-28 05:57:09,875 INFO [finetune.py:976] (0/7) Epoch 30, batch 2200, loss[loss=0.1835, simple_loss=0.2568, pruned_loss=0.05503, over 4885.00 frames. ], tot_loss[loss=0.1657, simple_loss=0.2398, pruned_loss=0.04583, over 956555.86 frames. ], batch size: 43, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:58:08,577 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.99 vs. limit=5.0 +2023-04-28 05:58:09,643 INFO [finetune.py:976] (0/7) Epoch 30, batch 2250, loss[loss=0.1917, simple_loss=0.264, pruned_loss=0.05973, over 4821.00 frames. ], tot_loss[loss=0.1667, simple_loss=0.2408, pruned_loss=0.04627, over 956560.65 frames. 
], batch size: 33, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:58:40,746 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168375.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:58:43,715 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.481e+02 1.883e+02 2.203e+02 3.675e+02, threshold=3.765e+02, percent-clipped=0.0 +2023-04-28 05:58:55,043 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168391.0, num_to_drop=1, layers_to_drop={3} +2023-04-28 05:59:05,520 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6040, 3.5853, 1.1799, 1.7926, 2.0015, 2.6601, 1.9466, 1.0660], + device='cuda:0'), covar=tensor([0.1298, 0.0702, 0.1709, 0.1234, 0.0979, 0.0824, 0.1461, 0.1917], + device='cuda:0'), in_proj_covar=tensor([0.0119, 0.0240, 0.0137, 0.0122, 0.0133, 0.0155, 0.0119, 0.0119], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 05:59:13,986 INFO [finetune.py:976] (0/7) Epoch 30, batch 2300, loss[loss=0.1216, simple_loss=0.202, pruned_loss=0.02059, over 4820.00 frames. ], tot_loss[loss=0.1659, simple_loss=0.2404, pruned_loss=0.04565, over 954875.89 frames. ], batch size: 25, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 05:59:38,047 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=168423.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:59:38,070 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168423.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 05:59:50,027 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7133, 1.3502, 4.5446, 4.2571, 3.9687, 4.3631, 4.2599, 3.9763], + device='cuda:0'), covar=tensor([0.7029, 0.6003, 0.1013, 0.1781, 0.1083, 0.1539, 0.1220, 0.1645], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0311, 0.0410, 0.0413, 0.0353, 0.0419, 0.0318, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:00:16,725 INFO [finetune.py:976] (0/7) Epoch 30, batch 2350, loss[loss=0.1477, simple_loss=0.2209, pruned_loss=0.03724, over 4864.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2388, pruned_loss=0.04518, over 955531.25 frames. ], batch size: 31, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:00:37,862 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=168471.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:00:48,858 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.790e+01 1.485e+02 1.765e+02 2.221e+02 4.572e+02, threshold=3.529e+02, percent-clipped=1.0 +2023-04-28 06:01:21,061 INFO [finetune.py:976] (0/7) Epoch 30, batch 2400, loss[loss=0.1614, simple_loss=0.2395, pruned_loss=0.04169, over 4860.00 frames. ], tot_loss[loss=0.1617, simple_loss=0.2354, pruned_loss=0.04404, over 958004.65 frames. 
], batch size: 49, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:01:30,579 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7267, 1.4715, 1.9430, 1.9839, 1.5173, 1.4068, 1.5778, 1.0449], + device='cuda:0'), covar=tensor([0.0538, 0.0602, 0.0369, 0.0470, 0.0722, 0.1134, 0.0529, 0.0579], + device='cuda:0'), in_proj_covar=tensor([0.0071, 0.0067, 0.0065, 0.0069, 0.0076, 0.0095, 0.0072, 0.0062], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0003, 0.0003, 0.0003, 0.0004, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 06:02:28,239 INFO [finetune.py:976] (0/7) Epoch 30, batch 2450, loss[loss=0.2064, simple_loss=0.2628, pruned_loss=0.07504, over 4845.00 frames. ], tot_loss[loss=0.1595, simple_loss=0.2322, pruned_loss=0.04336, over 955400.82 frames. ], batch size: 47, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:03:10,383 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.038e+02 1.524e+02 1.793e+02 2.156e+02 5.076e+02, threshold=3.587e+02, percent-clipped=1.0 +2023-04-28 06:03:21,574 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=168589.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:03:36,318 INFO [finetune.py:976] (0/7) Epoch 30, batch 2500, loss[loss=0.1705, simple_loss=0.2681, pruned_loss=0.03641, over 4806.00 frames. ], tot_loss[loss=0.1618, simple_loss=0.2349, pruned_loss=0.04434, over 955325.02 frames. ], batch size: 45, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:04:11,018 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.45 vs. limit=2.0 +2023-04-28 06:04:32,489 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.4896, 1.2809, 0.6672, 1.1747, 1.3286, 1.3453, 1.2697, 1.2794], + device='cuda:0'), covar=tensor([0.0483, 0.0386, 0.0376, 0.0550, 0.0301, 0.0502, 0.0489, 0.0579], + device='cuda:0'), in_proj_covar=tensor([0.0027, 0.0023, 0.0021, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 06:04:49,840 INFO [finetune.py:976] (0/7) Epoch 30, batch 2550, loss[loss=0.145, simple_loss=0.2302, pruned_loss=0.02983, over 4815.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2382, pruned_loss=0.04525, over 954713.06 frames. ], batch size: 38, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:05:15,421 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3737, 1.7396, 1.7883, 1.8803, 1.7608, 1.7841, 1.7896, 1.8056], + device='cuda:0'), covar=tensor([0.3928, 0.5413, 0.4189, 0.4461, 0.5373, 0.7031, 0.5259, 0.4798], + device='cuda:0'), in_proj_covar=tensor([0.0346, 0.0377, 0.0334, 0.0345, 0.0354, 0.0397, 0.0365, 0.0337], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 06:05:23,468 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.119e+02 1.608e+02 1.886e+02 2.283e+02 4.830e+02, threshold=3.772e+02, percent-clipped=5.0 +2023-04-28 06:05:31,386 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168691.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:05:38,672 INFO [finetune.py:976] (0/7) Epoch 30, batch 2600, loss[loss=0.1455, simple_loss=0.2224, pruned_loss=0.03429, over 4810.00 frames. ], tot_loss[loss=0.1651, simple_loss=0.2398, pruned_loss=0.04523, over 953893.91 frames. 
], batch size: 25, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:05:44,283 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.33 vs. limit=5.0 +2023-04-28 06:06:26,238 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=168739.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:06:37,255 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3220, 1.8500, 2.2149, 2.6808, 2.2133, 1.7570, 1.5772, 1.9771], + device='cuda:0'), covar=tensor([0.3504, 0.3200, 0.1668, 0.2198, 0.2669, 0.2777, 0.3947, 0.2010], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0247, 0.0229, 0.0316, 0.0223, 0.0237, 0.0230, 0.0187], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 06:06:40,211 INFO [finetune.py:976] (0/7) Epoch 30, batch 2650, loss[loss=0.1917, simple_loss=0.2685, pruned_loss=0.05749, over 4819.00 frames. ], tot_loss[loss=0.166, simple_loss=0.2409, pruned_loss=0.04557, over 953881.24 frames. ], batch size: 38, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:07:00,346 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.4356, 1.2903, 1.4533, 1.0433, 1.3720, 1.2339, 1.7578, 1.3402], + device='cuda:0'), covar=tensor([0.3491, 0.2065, 0.4842, 0.2677, 0.1585, 0.2215, 0.1614, 0.4555], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0356, 0.0427, 0.0353, 0.0384, 0.0377, 0.0371, 0.0427], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:07:11,146 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.6113, 1.9262, 1.6852, 2.5169, 2.5551, 2.1338, 2.1232, 1.7342], + device='cuda:0'), covar=tensor([0.1530, 0.1705, 0.1747, 0.1154, 0.1145, 0.1654, 0.1898, 0.2224], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0306, 0.0349, 0.0285, 0.0324, 0.0303, 0.0299, 0.0376], + device='cuda:0'), out_proj_covar=tensor([6.3752e-05, 6.2543e-05, 7.2981e-05, 5.6868e-05, 6.5895e-05, 6.2786e-05, + 6.1650e-05, 7.9297e-05], device='cuda:0') +2023-04-28 06:07:17,598 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.117e+02 1.537e+02 1.777e+02 2.075e+02 3.537e+02, threshold=3.553e+02, percent-clipped=0.0 +2023-04-28 06:07:44,073 INFO [finetune.py:976] (0/7) Epoch 30, batch 2700, loss[loss=0.14, simple_loss=0.2212, pruned_loss=0.02945, over 4800.00 frames. ], tot_loss[loss=0.1649, simple_loss=0.2401, pruned_loss=0.04483, over 954127.29 frames. ], batch size: 45, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:08:17,671 INFO [finetune.py:976] (0/7) Epoch 30, batch 2750, loss[loss=0.146, simple_loss=0.218, pruned_loss=0.03697, over 4871.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2371, pruned_loss=0.04464, over 954377.53 frames. ], batch size: 31, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:08:35,307 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.781e+01 1.528e+02 1.886e+02 2.380e+02 6.138e+02, threshold=3.773e+02, percent-clipped=2.0 +2023-04-28 06:08:41,776 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=168889.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:08:50,765 INFO [finetune.py:976] (0/7) Epoch 30, batch 2800, loss[loss=0.1581, simple_loss=0.2361, pruned_loss=0.04002, over 4937.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.234, pruned_loss=0.04357, over 954027.52 frames. 
], batch size: 38, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:09:12,948 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=168937.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:09:24,479 INFO [finetune.py:976] (0/7) Epoch 30, batch 2850, loss[loss=0.1092, simple_loss=0.1912, pruned_loss=0.01361, over 4788.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2323, pruned_loss=0.04284, over 955253.43 frames. ], batch size: 29, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:09:33,139 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168967.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:09:34,363 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=168969.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 06:09:41,979 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.589e+01 1.497e+02 1.758e+02 2.128e+02 6.037e+02, threshold=3.517e+02, percent-clipped=2.0 +2023-04-28 06:09:58,544 INFO [finetune.py:976] (0/7) Epoch 30, batch 2900, loss[loss=0.1538, simple_loss=0.2462, pruned_loss=0.03068, over 4837.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2355, pruned_loss=0.0438, over 955131.65 frames. ], batch size: 49, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:10:14,828 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169028.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:10:16,014 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169030.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 06:10:24,884 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.5551, 4.4710, 3.0044, 5.2387, 4.6341, 4.5351, 1.9010, 4.5877], + device='cuda:0'), covar=tensor([0.1620, 0.1104, 0.3443, 0.0897, 0.3724, 0.1542, 0.6331, 0.2157], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0218, 0.0255, 0.0304, 0.0301, 0.0251, 0.0277, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 06:10:31,849 INFO [finetune.py:976] (0/7) Epoch 30, batch 2950, loss[loss=0.1855, simple_loss=0.264, pruned_loss=0.05349, over 4828.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2391, pruned_loss=0.045, over 954791.29 frames. ], batch size: 47, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:10:49,914 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.313e+01 1.646e+02 1.860e+02 2.214e+02 4.123e+02, threshold=3.720e+02, percent-clipped=2.0 +2023-04-28 06:10:50,643 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169082.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:11:05,764 INFO [finetune.py:976] (0/7) Epoch 30, batch 3000, loss[loss=0.1535, simple_loss=0.234, pruned_loss=0.03652, over 4805.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2397, pruned_loss=0.04519, over 952033.38 frames. 
], batch size: 45, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:11:05,765 INFO [finetune.py:1001] (0/7) Computing validation loss +2023-04-28 06:11:12,424 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2092, 1.7395, 2.0616, 2.3243, 2.0973, 1.7166, 1.2716, 1.8070], + device='cuda:0'), covar=tensor([0.3201, 0.3015, 0.1537, 0.2019, 0.2623, 0.2659, 0.4013, 0.1966], + device='cuda:0'), in_proj_covar=tensor([0.0292, 0.0246, 0.0229, 0.0315, 0.0223, 0.0236, 0.0230, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 06:11:16,543 INFO [finetune.py:1010] (0/7) Epoch 30, validation: loss=0.1534, simple_loss=0.2215, pruned_loss=0.04259, over 2265189.00 frames. +2023-04-28 06:11:16,543 INFO [finetune.py:1011] (0/7) Maximum memory allocated so far is 6519MB +2023-04-28 06:11:24,697 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169116.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:11:43,917 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.30 vs. limit=2.0 +2023-04-28 06:11:52,070 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169143.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:12:04,394 INFO [finetune.py:976] (0/7) Epoch 30, batch 3050, loss[loss=0.1738, simple_loss=0.2305, pruned_loss=0.05859, over 4270.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2393, pruned_loss=0.04509, over 949987.53 frames. ], batch size: 66, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:12:13,431 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169159.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:12:36,607 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169177.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:12:42,082 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.026e+02 1.505e+02 1.782e+02 2.187e+02 4.062e+02, threshold=3.563e+02, percent-clipped=1.0 +2023-04-28 06:13:06,207 INFO [finetune.py:976] (0/7) Epoch 30, batch 3100, loss[loss=0.1258, simple_loss=0.2103, pruned_loss=0.02065, over 4751.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2378, pruned_loss=0.04432, over 951983.49 frames. ], batch size: 54, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:13:29,078 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2711, 1.7868, 2.1820, 2.6427, 2.1502, 1.7710, 1.4911, 2.0551], + device='cuda:0'), covar=tensor([0.3047, 0.3032, 0.1712, 0.2026, 0.2478, 0.2524, 0.4038, 0.1818], + device='cuda:0'), in_proj_covar=tensor([0.0293, 0.0247, 0.0229, 0.0315, 0.0223, 0.0236, 0.0230, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 06:13:29,645 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169220.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:13:50,607 INFO [finetune.py:976] (0/7) Epoch 30, batch 3150, loss[loss=0.1448, simple_loss=0.2197, pruned_loss=0.03491, over 4829.00 frames. ], tot_loss[loss=0.1616, simple_loss=0.2352, pruned_loss=0.04403, over 951933.79 frames. 
], batch size: 40, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:14:10,083 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.051e+01 1.353e+02 1.739e+02 2.048e+02 6.582e+02, threshold=3.478e+02, percent-clipped=1.0 +2023-04-28 06:14:23,996 INFO [finetune.py:976] (0/7) Epoch 30, batch 3200, loss[loss=0.1338, simple_loss=0.2055, pruned_loss=0.03106, over 4798.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2318, pruned_loss=0.04335, over 952251.35 frames. ], batch size: 25, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:14:38,825 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169323.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:14:40,569 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169325.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 06:14:57,968 INFO [finetune.py:976] (0/7) Epoch 30, batch 3250, loss[loss=0.1337, simple_loss=0.2093, pruned_loss=0.02908, over 4776.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.2335, pruned_loss=0.0445, over 953870.77 frames. ], batch size: 26, lr: 2.80e-03, grad_scale: 32.0 +2023-04-28 06:15:18,032 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.661e+01 1.547e+02 1.939e+02 2.313e+02 4.246e+02, threshold=3.878e+02, percent-clipped=2.0 +2023-04-28 06:15:28,760 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.27 vs. limit=2.0 +2023-04-28 06:15:32,127 INFO [finetune.py:976] (0/7) Epoch 30, batch 3300, loss[loss=0.1462, simple_loss=0.2373, pruned_loss=0.02759, over 4789.00 frames. ], tot_loss[loss=0.1643, simple_loss=0.2376, pruned_loss=0.04554, over 953137.67 frames. ], batch size: 29, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:15:34,718 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5614, 1.1761, 1.2581, 1.2544, 1.6621, 1.3883, 1.1787, 1.2343], + device='cuda:0'), covar=tensor([0.1371, 0.1262, 0.1787, 0.1353, 0.0775, 0.1327, 0.1679, 0.2095], + device='cuda:0'), in_proj_covar=tensor([0.0315, 0.0309, 0.0351, 0.0287, 0.0326, 0.0306, 0.0302, 0.0378], + device='cuda:0'), out_proj_covar=tensor([6.3884e-05, 6.2952e-05, 7.3450e-05, 5.7287e-05, 6.6470e-05, 6.3311e-05, + 6.2179e-05, 7.9727e-05], device='cuda:0') +2023-04-28 06:15:40,602 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.3083, 1.7291, 2.1895, 2.5855, 2.2351, 1.7043, 1.4970, 2.0930], + device='cuda:0'), covar=tensor([0.3038, 0.2942, 0.1589, 0.2123, 0.2375, 0.2584, 0.3872, 0.1751], + device='cuda:0'), in_proj_covar=tensor([0.0295, 0.0249, 0.0231, 0.0318, 0.0226, 0.0238, 0.0232, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0001], + device='cuda:0') +2023-04-28 06:15:41,898 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.78 vs. 
limit=2.0 +2023-04-28 06:15:44,173 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.5774, 4.7473, 3.1269, 5.3413, 4.6709, 4.6162, 2.2172, 4.6385], + device='cuda:0'), covar=tensor([0.1627, 0.0915, 0.3480, 0.1088, 0.4581, 0.1672, 0.5430, 0.2173], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0219, 0.0255, 0.0306, 0.0302, 0.0252, 0.0277, 0.0277], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 06:15:56,492 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169438.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:16:05,662 INFO [finetune.py:976] (0/7) Epoch 30, batch 3350, loss[loss=0.2126, simple_loss=0.2834, pruned_loss=0.0709, over 4744.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2399, pruned_loss=0.04619, over 952937.61 frames. ], batch size: 54, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:16:18,729 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169472.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:16:26,057 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.565e+02 1.911e+02 2.262e+02 9.005e+02, threshold=3.822e+02, percent-clipped=1.0 +2023-04-28 06:16:26,194 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6602, 2.6218, 2.1514, 2.3877, 2.6360, 2.5174, 3.5832, 2.0862], + device='cuda:0'), covar=tensor([0.3777, 0.2268, 0.4347, 0.3113, 0.1884, 0.2214, 0.1446, 0.4217], + device='cuda:0'), in_proj_covar=tensor([0.0340, 0.0357, 0.0427, 0.0353, 0.0386, 0.0376, 0.0372, 0.0427], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:16:39,418 INFO [finetune.py:976] (0/7) Epoch 30, batch 3400, loss[loss=0.2031, simple_loss=0.2643, pruned_loss=0.07092, over 4829.00 frames. ], tot_loss[loss=0.1671, simple_loss=0.2412, pruned_loss=0.04652, over 954848.85 frames. ], batch size: 49, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:16:45,491 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6420, 1.3809, 4.2775, 4.0548, 3.7152, 4.1295, 3.9179, 3.7349], + device='cuda:0'), covar=tensor([0.7777, 0.5545, 0.1009, 0.1557, 0.1100, 0.1549, 0.2172, 0.1444], + device='cuda:0'), in_proj_covar=tensor([0.0312, 0.0310, 0.0409, 0.0411, 0.0351, 0.0416, 0.0317, 0.0365], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:16:47,275 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169515.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:17:12,722 INFO [finetune.py:976] (0/7) Epoch 30, batch 3450, loss[loss=0.1826, simple_loss=0.2582, pruned_loss=0.05351, over 4821.00 frames. ], tot_loss[loss=0.1662, simple_loss=0.2404, pruned_loss=0.04595, over 953960.29 frames. 
], batch size: 33, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:17:23,452 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7062, 1.5101, 1.7562, 2.0055, 2.1211, 1.6030, 1.3637, 1.9127], + device='cuda:0'), covar=tensor([0.0776, 0.1257, 0.0805, 0.0582, 0.0571, 0.0857, 0.0776, 0.0533], + device='cuda:0'), in_proj_covar=tensor([0.0183, 0.0204, 0.0184, 0.0171, 0.0178, 0.0179, 0.0150, 0.0178], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:17:36,961 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.050e+02 1.504e+02 1.780e+02 2.155e+02 3.271e+02, threshold=3.560e+02, percent-clipped=0.0 +2023-04-28 06:18:07,739 INFO [finetune.py:976] (0/7) Epoch 30, batch 3500, loss[loss=0.17, simple_loss=0.2392, pruned_loss=0.05043, over 4821.00 frames. ], tot_loss[loss=0.1642, simple_loss=0.2377, pruned_loss=0.04536, over 954462.23 frames. ], batch size: 39, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:18:22,951 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. limit=2.0 +2023-04-28 06:18:29,321 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169621.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:18:31,005 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169623.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:18:32,238 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169625.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 06:19:13,502 INFO [finetune.py:976] (0/7) Epoch 30, batch 3550, loss[loss=0.1413, simple_loss=0.2099, pruned_loss=0.03634, over 4390.00 frames. ], tot_loss[loss=0.1606, simple_loss=0.2336, pruned_loss=0.04382, over 953729.24 frames. 
], batch size: 19, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:19:13,603 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3051, 1.6972, 1.5141, 1.8816, 1.8088, 1.9439, 1.4995, 3.7004], + device='cuda:0'), covar=tensor([0.0620, 0.0794, 0.0765, 0.1145, 0.0592, 0.0505, 0.0688, 0.0110], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0014, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 06:19:22,180 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5181, 3.0108, 1.1189, 1.7957, 2.3843, 1.6609, 4.3110, 2.2925], + device='cuda:0'), covar=tensor([0.0613, 0.0764, 0.0865, 0.1189, 0.0499, 0.0923, 0.0224, 0.0524], + device='cuda:0'), in_proj_covar=tensor([0.0050, 0.0063, 0.0046, 0.0046, 0.0049, 0.0051, 0.0072, 0.0050], + device='cuda:0'), out_proj_covar=tensor([0.0008, 0.0010, 0.0007, 0.0008, 0.0008, 0.0008, 0.0010, 0.0007], + device='cuda:0') +2023-04-28 06:19:30,784 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=169671.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:19:31,504 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8508, 1.0271, 1.7649, 2.3058, 1.8816, 1.7535, 1.7903, 1.7037], + device='cuda:0'), covar=tensor([0.4723, 0.6971, 0.6659, 0.5697, 0.6210, 0.8119, 0.8254, 0.9238], + device='cuda:0'), in_proj_covar=tensor([0.0447, 0.0427, 0.0522, 0.0508, 0.0475, 0.0515, 0.0515, 0.0531], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:19:32,038 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=169673.0, num_to_drop=1, layers_to_drop={1} +2023-04-28 06:19:40,615 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1536, 1.5034, 1.3104, 1.7451, 1.5965, 1.6805, 1.3629, 3.0854], + device='cuda:0'), covar=tensor([0.0695, 0.0859, 0.0840, 0.1219, 0.0663, 0.0499, 0.0799, 0.0155], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0014, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 06:19:42,308 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.126e+01 1.430e+02 1.728e+02 2.038e+02 3.209e+02, threshold=3.455e+02, percent-clipped=0.0 +2023-04-28 06:19:43,550 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169682.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:20:12,670 INFO [finetune.py:976] (0/7) Epoch 30, batch 3600, loss[loss=0.1537, simple_loss=0.2203, pruned_loss=0.04355, over 4909.00 frames. ], tot_loss[loss=0.1604, simple_loss=0.2328, pruned_loss=0.04401, over 954729.04 frames. 
], batch size: 32, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:20:26,238 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.2385, 1.5392, 1.3152, 1.5289, 1.3055, 1.2300, 1.3469, 0.9858], + device='cuda:0'), covar=tensor([0.1783, 0.1334, 0.0978, 0.1149, 0.3761, 0.1303, 0.1806, 0.2428], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0298, 0.0216, 0.0272, 0.0306, 0.0253, 0.0246, 0.0259], + device='cuda:0'), out_proj_covar=tensor([1.1144e-04, 1.1676e-04, 8.4691e-05, 1.0646e-04, 1.2321e-04, 9.9368e-05, + 9.8929e-05, 1.0171e-04], device='cuda:0') +2023-04-28 06:20:45,830 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169729.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:20:57,480 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169738.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:21:10,875 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169747.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:21:20,082 INFO [finetune.py:976] (0/7) Epoch 30, batch 3650, loss[loss=0.1182, simple_loss=0.198, pruned_loss=0.01919, over 4769.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2351, pruned_loss=0.04485, over 953916.54 frames. ], batch size: 28, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:21:43,285 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169772.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:21:53,879 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.801e+01 1.645e+02 1.930e+02 2.224e+02 4.571e+02, threshold=3.861e+02, percent-clipped=2.0 +2023-04-28 06:22:03,120 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=169786.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:22:03,187 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=169786.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:22:05,610 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169790.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:22:25,954 INFO [finetune.py:976] (0/7) Epoch 30, batch 3700, loss[loss=0.1168, simple_loss=0.1843, pruned_loss=0.02471, over 4294.00 frames. ], tot_loss[loss=0.1641, simple_loss=0.2377, pruned_loss=0.04526, over 953174.36 frames. 
], batch size: 18, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:22:27,271 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0227, 1.8055, 2.1271, 2.4378, 2.4480, 2.0666, 1.8062, 2.2299], + device='cuda:0'), covar=tensor([0.0842, 0.1241, 0.0722, 0.0580, 0.0575, 0.0789, 0.0715, 0.0583], + device='cuda:0'), in_proj_covar=tensor([0.0185, 0.0206, 0.0185, 0.0173, 0.0180, 0.0181, 0.0152, 0.0179], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:22:35,255 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169808.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:22:44,952 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=169815.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:22:47,934 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=169820.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:22:57,421 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8943, 2.2729, 2.3349, 2.3912, 2.2099, 2.3671, 2.4469, 2.3801], + device='cuda:0'), covar=tensor([0.3298, 0.4958, 0.4523, 0.4759, 0.5477, 0.6007, 0.4781, 0.4679], + device='cuda:0'), in_proj_covar=tensor([0.0345, 0.0377, 0.0333, 0.0345, 0.0354, 0.0397, 0.0365, 0.0338], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 06:23:07,753 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.22 vs. limit=2.0 +2023-04-28 06:23:22,326 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=169847.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:23:31,471 INFO [finetune.py:976] (0/7) Epoch 30, batch 3750, loss[loss=0.1679, simple_loss=0.2502, pruned_loss=0.04275, over 4802.00 frames. ], tot_loss[loss=0.1648, simple_loss=0.2389, pruned_loss=0.04532, over 952603.51 frames. ], batch size: 40, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:23:33,941 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.4784, 4.4264, 3.1276, 5.1324, 4.4776, 4.4756, 1.9890, 4.4367], + device='cuda:0'), covar=tensor([0.1539, 0.0910, 0.3561, 0.0965, 0.2702, 0.1556, 0.5460, 0.1950], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0219, 0.0254, 0.0304, 0.0302, 0.0251, 0.0275, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 06:23:42,216 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=169863.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:24:04,805 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.130e+02 1.513e+02 1.804e+02 2.185e+02 4.010e+02, threshold=3.607e+02, percent-clipped=1.0 +2023-04-28 06:24:36,635 INFO [finetune.py:976] (0/7) Epoch 30, batch 3800, loss[loss=0.1549, simple_loss=0.2277, pruned_loss=0.04106, over 4729.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2385, pruned_loss=0.04536, over 951674.78 frames. 
], batch size: 54, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:25:06,283 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7383, 1.3359, 1.8751, 2.2418, 1.8441, 1.7029, 1.7785, 1.7262], + device='cuda:0'), covar=tensor([0.4333, 0.6467, 0.5953, 0.5253, 0.5642, 0.7166, 0.7328, 0.8547], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0429, 0.0524, 0.0510, 0.0477, 0.0517, 0.0517, 0.0533], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:25:40,863 INFO [finetune.py:976] (0/7) Epoch 30, batch 3850, loss[loss=0.1913, simple_loss=0.2729, pruned_loss=0.05485, over 4242.00 frames. ], tot_loss[loss=0.1632, simple_loss=0.2372, pruned_loss=0.04461, over 951699.06 frames. ], batch size: 65, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:26:12,317 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=169977.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:26:14,680 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.720e+01 1.456e+02 1.655e+02 1.971e+02 4.065e+02, threshold=3.309e+02, percent-clipped=1.0 +2023-04-28 06:26:41,741 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/checkpoint-170000.pt +2023-04-28 06:26:45,747 INFO [finetune.py:976] (0/7) Epoch 30, batch 3900, loss[loss=0.1504, simple_loss=0.2241, pruned_loss=0.03834, over 4825.00 frames. ], tot_loss[loss=0.1623, simple_loss=0.2357, pruned_loss=0.04442, over 953914.98 frames. ], batch size: 25, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:27:35,969 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.21 vs. limit=2.0 +2023-04-28 06:27:40,096 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170045.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:27:45,683 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([0.8616, 1.1678, 3.2827, 3.0619, 2.9810, 3.2511, 3.2268, 2.9285], + device='cuda:0'), covar=tensor([0.7681, 0.5398, 0.1509, 0.2239, 0.1382, 0.2156, 0.1703, 0.1766], + device='cuda:0'), in_proj_covar=tensor([0.0308, 0.0308, 0.0403, 0.0406, 0.0346, 0.0412, 0.0314, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:27:45,817 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.37 vs. limit=2.0 +2023-04-28 06:27:46,937 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9347, 2.3930, 2.0250, 2.3228, 1.7429, 2.0438, 2.0002, 1.5592], + device='cuda:0'), covar=tensor([0.1950, 0.1138, 0.0838, 0.1114, 0.3001, 0.1105, 0.1787, 0.2519], + device='cuda:0'), in_proj_covar=tensor([0.0279, 0.0298, 0.0216, 0.0271, 0.0306, 0.0253, 0.0246, 0.0259], + device='cuda:0'), out_proj_covar=tensor([1.1116e-04, 1.1711e-04, 8.4615e-05, 1.0630e-04, 1.2304e-04, 9.9419e-05, + 9.8832e-05, 1.0182e-04], device='cuda:0') +2023-04-28 06:27:48,653 INFO [finetune.py:976] (0/7) Epoch 30, batch 3950, loss[loss=0.1603, simple_loss=0.2257, pruned_loss=0.04744, over 4905.00 frames. ], tot_loss[loss=0.1607, simple_loss=0.2335, pruned_loss=0.0439, over 955866.62 frames. 
], batch size: 32, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:28:24,770 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.030e+02 1.459e+02 1.752e+02 2.144e+02 4.109e+02, threshold=3.505e+02, percent-clipped=2.0 +2023-04-28 06:28:27,235 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170085.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:28:46,915 INFO [finetune.py:976] (0/7) Epoch 30, batch 4000, loss[loss=0.1784, simple_loss=0.251, pruned_loss=0.05293, over 4900.00 frames. ], tot_loss[loss=0.1593, simple_loss=0.2323, pruned_loss=0.04315, over 954944.75 frames. ], batch size: 36, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:28:46,981 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170103.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:28:47,683 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.0014, 2.3610, 1.9603, 1.9177, 1.4819, 1.5308, 1.9692, 1.4456], + device='cuda:0'), covar=tensor([0.1608, 0.1376, 0.1321, 0.1472, 0.2251, 0.1839, 0.0981, 0.1958], + device='cuda:0'), in_proj_covar=tensor([0.0197, 0.0208, 0.0168, 0.0202, 0.0199, 0.0185, 0.0155, 0.0186], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 06:28:53,786 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170106.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:29:04,473 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3803, 1.3961, 4.0450, 3.8044, 3.5608, 3.8823, 3.8441, 3.6065], + device='cuda:0'), covar=tensor([0.7344, 0.5558, 0.1117, 0.1981, 0.1330, 0.1925, 0.1598, 0.1542], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0308, 0.0403, 0.0406, 0.0345, 0.0412, 0.0314, 0.0361], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:29:17,292 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.54 vs. limit=2.0 +2023-04-28 06:29:25,905 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6418, 3.5920, 0.9420, 1.8945, 2.0102, 2.5073, 1.9783, 0.9577], + device='cuda:0'), covar=tensor([0.1303, 0.0801, 0.1910, 0.1180, 0.1015, 0.0959, 0.1484, 0.1948], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0236, 0.0135, 0.0119, 0.0130, 0.0152, 0.0117, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 06:29:39,697 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170142.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:29:50,077 INFO [finetune.py:976] (0/7) Epoch 30, batch 4050, loss[loss=0.1643, simple_loss=0.2417, pruned_loss=0.04345, over 4905.00 frames. ], tot_loss[loss=0.1625, simple_loss=0.236, pruned_loss=0.04448, over 953872.34 frames. ], batch size: 35, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:30:16,355 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.23 vs. 
limit=2.0 +2023-04-28 06:30:20,530 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5394, 0.7144, 1.4886, 1.9165, 1.6081, 1.4467, 1.5057, 1.4885], + device='cuda:0'), covar=tensor([0.4228, 0.6231, 0.5462, 0.5216, 0.5462, 0.6886, 0.7001, 0.8548], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0428, 0.0522, 0.0508, 0.0476, 0.0516, 0.0517, 0.0532], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:30:22,175 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.182e+02 1.558e+02 1.902e+02 2.221e+02 4.560e+02, threshold=3.805e+02, percent-clipped=3.0 +2023-04-28 06:30:36,789 INFO [finetune.py:976] (0/7) Epoch 30, batch 4100, loss[loss=0.2096, simple_loss=0.2763, pruned_loss=0.07147, over 4836.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2388, pruned_loss=0.04507, over 955887.49 frames. ], batch size: 49, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:31:10,085 INFO [finetune.py:976] (0/7) Epoch 30, batch 4150, loss[loss=0.1401, simple_loss=0.2182, pruned_loss=0.03102, over 4749.00 frames. ], tot_loss[loss=0.1646, simple_loss=0.2394, pruned_loss=0.04493, over 957648.51 frames. ], batch size: 26, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:31:26,669 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170277.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:31:29,027 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.851e+01 1.521e+02 1.781e+02 2.076e+02 3.722e+02, threshold=3.562e+02, percent-clipped=0.0 +2023-04-28 06:31:42,966 INFO [finetune.py:976] (0/7) Epoch 30, batch 4200, loss[loss=0.1609, simple_loss=0.2385, pruned_loss=0.04159, over 4893.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2392, pruned_loss=0.04511, over 955842.19 frames. ], batch size: 36, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:31:58,819 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=170325.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:32:16,768 INFO [finetune.py:976] (0/7) Epoch 30, batch 4250, loss[loss=0.1506, simple_loss=0.2173, pruned_loss=0.04199, over 4774.00 frames. ], tot_loss[loss=0.1637, simple_loss=0.2374, pruned_loss=0.04495, over 956607.97 frames. ], batch size: 28, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:32:36,244 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.843e+01 1.455e+02 1.729e+02 2.061e+02 4.081e+02, threshold=3.459e+02, percent-clipped=1.0 +2023-04-28 06:32:38,777 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170385.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:32:44,189 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.8585, 2.7863, 2.3584, 3.2783, 2.9011, 2.8147, 1.2576, 2.7566], + device='cuda:0'), covar=tensor([0.2324, 0.1827, 0.3318, 0.3202, 0.2550, 0.2579, 0.5970, 0.3219], + device='cuda:0'), in_proj_covar=tensor([0.0251, 0.0220, 0.0254, 0.0304, 0.0301, 0.0251, 0.0275, 0.0278], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 06:32:48,584 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170401.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:32:50,168 INFO [finetune.py:976] (0/7) Epoch 30, batch 4300, loss[loss=0.1531, simple_loss=0.2219, pruned_loss=0.04217, over 4734.00 frames. 
], tot_loss[loss=0.1615, simple_loss=0.235, pruned_loss=0.04399, over 956344.82 frames. ], batch size: 23, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:32:50,253 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170403.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:32:55,784 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=3.68 vs. limit=5.0 +2023-04-28 06:32:58,049 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6252, 1.6746, 1.8211, 2.0082, 2.1306, 1.7489, 1.3424, 1.9776], + device='cuda:0'), covar=tensor([0.0825, 0.1175, 0.0782, 0.0590, 0.0571, 0.0800, 0.0794, 0.0509], + device='cuda:0'), in_proj_covar=tensor([0.0182, 0.0203, 0.0184, 0.0173, 0.0178, 0.0179, 0.0150, 0.0177], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:33:08,695 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.2721, 2.9590, 2.6553, 2.8030, 2.1143, 2.4808, 2.6808, 1.9814], + device='cuda:0'), covar=tensor([0.1928, 0.0970, 0.0690, 0.1122, 0.3048, 0.1150, 0.1869, 0.2547], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0297, 0.0216, 0.0271, 0.0307, 0.0253, 0.0246, 0.0259], + device='cuda:0'), out_proj_covar=tensor([1.1132e-04, 1.1652e-04, 8.4400e-05, 1.0618e-04, 1.2336e-04, 9.9436e-05, + 9.8846e-05, 1.0175e-04], device='cuda:0') +2023-04-28 06:33:10,851 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=170433.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:33:16,833 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170442.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:33:22,271 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=170451.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:33:22,921 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5371, 3.4040, 0.8653, 1.7775, 1.9226, 2.3387, 1.9601, 1.1224], + device='cuda:0'), covar=tensor([0.1408, 0.0954, 0.2131, 0.1288, 0.1076, 0.1052, 0.1585, 0.1840], + device='cuda:0'), in_proj_covar=tensor([0.0117, 0.0237, 0.0136, 0.0120, 0.0131, 0.0153, 0.0117, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 06:33:23,431 INFO [finetune.py:976] (0/7) Epoch 30, batch 4350, loss[loss=0.1347, simple_loss=0.2093, pruned_loss=0.03002, over 4708.00 frames. ], tot_loss[loss=0.159, simple_loss=0.2321, pruned_loss=0.043, over 955084.22 frames. ], batch size: 23, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:33:28,850 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170461.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:33:52,313 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.003e+02 1.526e+02 1.784e+02 2.388e+02 5.022e+02, threshold=3.568e+02, percent-clipped=3.0 +2023-04-28 06:34:04,596 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=170490.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:34:23,442 INFO [finetune.py:976] (0/7) Epoch 30, batch 4400, loss[loss=0.1723, simple_loss=0.2315, pruned_loss=0.05655, over 4747.00 frames. ], tot_loss[loss=0.1596, simple_loss=0.2328, pruned_loss=0.04317, over 954583.60 frames. 
], batch size: 23, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:34:47,061 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170522.0, num_to_drop=1, layers_to_drop={2} +2023-04-28 06:34:47,679 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6704, 1.9812, 1.8072, 1.9533, 1.5172, 1.7793, 1.7187, 1.4189], + device='cuda:0'), covar=tensor([0.1626, 0.0935, 0.0727, 0.1021, 0.3158, 0.0923, 0.1630, 0.2014], + device='cuda:0'), in_proj_covar=tensor([0.0281, 0.0299, 0.0217, 0.0273, 0.0308, 0.0255, 0.0248, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1198e-04, 1.1733e-04, 8.4848e-05, 1.0690e-04, 1.2398e-04, 9.9937e-05, + 9.9400e-05, 1.0232e-04], device='cuda:0') +2023-04-28 06:34:49,585 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7122, 1.0200, 1.6936, 2.1225, 1.7384, 1.6381, 1.6681, 1.6272], + device='cuda:0'), covar=tensor([0.3950, 0.6601, 0.5609, 0.5304, 0.5336, 0.6996, 0.6982, 0.8662], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0427, 0.0520, 0.0507, 0.0475, 0.0515, 0.0516, 0.0532], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:35:08,309 INFO [finetune.py:976] (0/7) Epoch 30, batch 4450, loss[loss=0.1953, simple_loss=0.2734, pruned_loss=0.05862, over 4853.00 frames. ], tot_loss[loss=0.1628, simple_loss=0.2367, pruned_loss=0.04443, over 953449.48 frames. ], batch size: 49, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:35:18,634 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170569.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:35:25,681 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9664, 1.4821, 5.0434, 4.7264, 4.3967, 4.8669, 4.4799, 4.4111], + device='cuda:0'), covar=tensor([0.7122, 0.5918, 0.1058, 0.1989, 0.1154, 0.1434, 0.1417, 0.1622], + device='cuda:0'), in_proj_covar=tensor([0.0307, 0.0305, 0.0402, 0.0405, 0.0345, 0.0411, 0.0313, 0.0360], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:35:26,201 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.085e+02 1.572e+02 1.800e+02 2.072e+02 3.635e+02, threshold=3.600e+02, percent-clipped=1.0 +2023-04-28 06:35:35,288 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7555, 4.0086, 0.7025, 2.0730, 2.1687, 2.7178, 2.3491, 1.0076], + device='cuda:0'), covar=tensor([0.1356, 0.0901, 0.2238, 0.1248, 0.1072, 0.1059, 0.1508, 0.2108], + device='cuda:0'), in_proj_covar=tensor([0.0116, 0.0235, 0.0135, 0.0119, 0.0130, 0.0151, 0.0116, 0.0117], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0005, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0003], + device='cuda:0') +2023-04-28 06:35:41,956 INFO [finetune.py:976] (0/7) Epoch 30, batch 4500, loss[loss=0.1949, simple_loss=0.2688, pruned_loss=0.06047, over 4908.00 frames. ], tot_loss[loss=0.1633, simple_loss=0.238, pruned_loss=0.04433, over 954698.53 frames. ], batch size: 37, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:35:59,491 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170630.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:36:15,352 INFO [finetune.py:976] (0/7) Epoch 30, batch 4550, loss[loss=0.1934, simple_loss=0.2539, pruned_loss=0.06639, over 4834.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2396, pruned_loss=0.04487, over 955038.55 frames. 
], batch size: 30, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:36:32,815 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.9192, 2.5083, 2.1554, 2.4564, 1.7555, 2.1591, 2.1437, 1.6195], + device='cuda:0'), covar=tensor([0.1825, 0.1118, 0.0833, 0.0972, 0.3147, 0.1107, 0.1716, 0.2297], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0298, 0.0216, 0.0272, 0.0306, 0.0254, 0.0247, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1155e-04, 1.1708e-04, 8.4469e-05, 1.0652e-04, 1.2325e-04, 9.9921e-05, + 9.9126e-05, 1.0205e-04], device='cuda:0') +2023-04-28 06:36:33,303 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.995e+01 1.537e+02 1.792e+02 2.273e+02 3.619e+02, threshold=3.584e+02, percent-clipped=1.0 +2023-04-28 06:36:35,272 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6206, 2.6987, 2.2588, 2.3969, 2.6968, 2.2974, 3.6200, 2.1602], + device='cuda:0'), covar=tensor([0.3812, 0.2464, 0.4005, 0.3037, 0.1872, 0.2669, 0.1325, 0.4074], + device='cuda:0'), in_proj_covar=tensor([0.0342, 0.0358, 0.0429, 0.0354, 0.0388, 0.0377, 0.0372, 0.0425], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:36:47,849 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=170701.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:36:48,972 INFO [finetune.py:976] (0/7) Epoch 30, batch 4600, loss[loss=0.1784, simple_loss=0.2543, pruned_loss=0.0513, over 4830.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.239, pruned_loss=0.04447, over 954767.12 frames. ], batch size: 33, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:36:53,956 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1223, 2.6441, 2.3544, 2.6423, 2.0198, 2.3316, 2.4716, 1.9153], + device='cuda:0'), covar=tensor([0.1886, 0.1163, 0.0772, 0.0969, 0.3080, 0.1050, 0.1748, 0.2450], + device='cuda:0'), in_proj_covar=tensor([0.0280, 0.0299, 0.0216, 0.0272, 0.0307, 0.0255, 0.0247, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1167e-04, 1.1719e-04, 8.4464e-05, 1.0661e-04, 1.2343e-04, 9.9922e-05, + 9.9161e-05, 1.0213e-04], device='cuda:0') +2023-04-28 06:37:19,720 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=170749.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:37:22,624 INFO [finetune.py:976] (0/7) Epoch 30, batch 4650, loss[loss=0.1687, simple_loss=0.2342, pruned_loss=0.05155, over 4917.00 frames. ], tot_loss[loss=0.1627, simple_loss=0.2368, pruned_loss=0.0443, over 953655.27 frames. ], batch size: 43, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:37:39,681 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.80 vs. 
limit=2.0 +2023-04-28 06:37:39,986 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.849e+01 1.520e+02 1.798e+02 2.217e+02 4.405e+02, threshold=3.597e+02, percent-clipped=2.0 +2023-04-28 06:37:44,944 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8377, 1.3137, 1.5020, 1.5367, 1.9255, 1.6723, 1.3145, 1.4664], + device='cuda:0'), covar=tensor([0.1612, 0.1435, 0.1809, 0.1325, 0.0844, 0.1445, 0.1923, 0.2220], + device='cuda:0'), in_proj_covar=tensor([0.0317, 0.0309, 0.0352, 0.0287, 0.0325, 0.0308, 0.0302, 0.0379], + device='cuda:0'), out_proj_covar=tensor([6.4365e-05, 6.3017e-05, 7.3577e-05, 5.7289e-05, 6.6139e-05, 6.3660e-05, + 6.2225e-05, 7.9939e-05], device='cuda:0') +2023-04-28 06:37:55,525 INFO [finetune.py:976] (0/7) Epoch 30, batch 4700, loss[loss=0.1451, simple_loss=0.2178, pruned_loss=0.03623, over 4836.00 frames. ], tot_loss[loss=0.1598, simple_loss=0.2333, pruned_loss=0.04318, over 952980.23 frames. ], batch size: 41, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:38:05,045 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170817.0, num_to_drop=1, layers_to_drop={3} +2023-04-28 06:38:29,291 INFO [finetune.py:976] (0/7) Epoch 30, batch 4750, loss[loss=0.1697, simple_loss=0.2344, pruned_loss=0.05245, over 4757.00 frames. ], tot_loss[loss=0.1589, simple_loss=0.2317, pruned_loss=0.04303, over 951184.69 frames. ], batch size: 28, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:38:47,637 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.061e+02 1.520e+02 1.829e+02 2.099e+02 3.984e+02, threshold=3.658e+02, percent-clipped=1.0 +2023-04-28 06:38:48,554 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.55 vs. limit=5.0 +2023-04-28 06:38:48,934 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([4.4559, 4.3710, 3.0235, 5.1101, 4.4542, 4.4089, 1.8537, 4.4227], + device='cuda:0'), covar=tensor([0.1415, 0.1113, 0.3727, 0.0841, 0.3154, 0.1465, 0.5786, 0.1841], + device='cuda:0'), in_proj_covar=tensor([0.0250, 0.0220, 0.0254, 0.0302, 0.0300, 0.0251, 0.0275, 0.0276], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 06:39:16,797 INFO [finetune.py:976] (0/7) Epoch 30, batch 4800, loss[loss=0.1938, simple_loss=0.2727, pruned_loss=0.05743, over 4858.00 frames. ], tot_loss[loss=0.1613, simple_loss=0.2348, pruned_loss=0.04392, over 952885.31 frames. ], batch size: 44, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:39:29,713 INFO [scaling.py:679] (0/7) Whitening: num_groups=1, num_channels=384, metric=4.73 vs. limit=5.0 +2023-04-28 06:39:30,223 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=192, metric=1.52 vs. limit=2.0 +2023-04-28 06:39:45,897 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=170925.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:39:47,774 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=170928.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:40:21,166 INFO [finetune.py:976] (0/7) Epoch 30, batch 4850, loss[loss=0.1813, simple_loss=0.256, pruned_loss=0.05328, over 4898.00 frames. ], tot_loss[loss=0.1626, simple_loss=0.2365, pruned_loss=0.04435, over 950850.70 frames. 
], batch size: 32, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:40:52,179 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 8.978e+01 1.534e+02 1.869e+02 2.233e+02 4.849e+02, threshold=3.739e+02, percent-clipped=1.0 +2023-04-28 06:40:59,534 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=170989.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:41:19,848 INFO [finetune.py:976] (0/7) Epoch 30, batch 4900, loss[loss=0.2137, simple_loss=0.2838, pruned_loss=0.07177, over 4812.00 frames. ], tot_loss[loss=0.165, simple_loss=0.2389, pruned_loss=0.04553, over 952665.42 frames. ], batch size: 45, lr: 2.79e-03, grad_scale: 32.0 +2023-04-28 06:41:50,445 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.3096, 1.6890, 1.4926, 1.9202, 1.7099, 1.9972, 1.4978, 3.7181], + device='cuda:0'), covar=tensor([0.0628, 0.0799, 0.0817, 0.1194, 0.0655, 0.0500, 0.0735, 0.0147], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0038, 0.0039, 0.0043, 0.0040, 0.0037, 0.0038, 0.0054], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0014, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 06:42:25,534 INFO [finetune.py:976] (0/7) Epoch 30, batch 4950, loss[loss=0.1776, simple_loss=0.2496, pruned_loss=0.05278, over 4869.00 frames. ], tot_loss[loss=0.1661, simple_loss=0.2406, pruned_loss=0.04584, over 953438.77 frames. ], batch size: 34, lr: 2.78e-03, grad_scale: 64.0 +2023-04-28 06:42:45,068 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.189e+02 1.630e+02 1.843e+02 2.166e+02 3.655e+02, threshold=3.685e+02, percent-clipped=0.0 +2023-04-28 06:42:52,368 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171092.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:42:54,221 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1359, 0.7662, 0.9577, 0.9226, 1.2203, 1.0213, 0.9977, 0.9915], + device='cuda:0'), covar=tensor([0.1632, 0.1477, 0.1857, 0.1535, 0.0983, 0.1440, 0.1625, 0.2238], + device='cuda:0'), in_proj_covar=tensor([0.0314, 0.0308, 0.0349, 0.0286, 0.0325, 0.0307, 0.0300, 0.0375], + device='cuda:0'), out_proj_covar=tensor([6.3897e-05, 6.2909e-05, 7.2964e-05, 5.7016e-05, 6.6171e-05, 6.3621e-05, + 6.1827e-05, 7.9169e-05], device='cuda:0') +2023-04-28 06:42:59,425 INFO [finetune.py:976] (0/7) Epoch 30, batch 5000, loss[loss=0.1428, simple_loss=0.2177, pruned_loss=0.03391, over 4706.00 frames. ], tot_loss[loss=0.1655, simple_loss=0.2394, pruned_loss=0.04585, over 955489.72 frames. ], batch size: 23, lr: 2.78e-03, grad_scale: 64.0 +2023-04-28 06:43:09,463 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171117.0, num_to_drop=1, layers_to_drop={0} +2023-04-28 06:43:27,289 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.5804, 1.8914, 2.0788, 2.1482, 1.9944, 2.0391, 2.1024, 2.0541], + device='cuda:0'), covar=tensor([0.3781, 0.5472, 0.4327, 0.4482, 0.5342, 0.6821, 0.5336, 0.4736], + device='cuda:0'), in_proj_covar=tensor([0.0341, 0.0373, 0.0330, 0.0342, 0.0351, 0.0393, 0.0361, 0.0334], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 06:43:33,016 INFO [finetune.py:976] (0/7) Epoch 30, batch 5050, loss[loss=0.1324, simple_loss=0.209, pruned_loss=0.02794, over 4935.00 frames. ], tot_loss[loss=0.1634, simple_loss=0.2366, pruned_loss=0.04512, over 953705.42 frames. 
], batch size: 38, lr: 2.78e-03, grad_scale: 64.0 +2023-04-28 06:43:33,149 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171153.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:43:40,801 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=171165.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:43:52,462 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.935e+01 1.498e+02 1.792e+02 2.110e+02 3.798e+02, threshold=3.585e+02, percent-clipped=1.0 +2023-04-28 06:44:02,205 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171196.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:44:06,508 INFO [finetune.py:976] (0/7) Epoch 30, batch 5100, loss[loss=0.1546, simple_loss=0.2239, pruned_loss=0.0426, over 4900.00 frames. ], tot_loss[loss=0.1615, simple_loss=0.2342, pruned_loss=0.04442, over 954875.50 frames. ], batch size: 43, lr: 2.78e-03, grad_scale: 64.0 +2023-04-28 06:44:20,932 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. limit=2.0 +2023-04-28 06:44:21,409 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171225.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:44:40,213 INFO [finetune.py:976] (0/7) Epoch 30, batch 5150, loss[loss=0.1859, simple_loss=0.2579, pruned_loss=0.057, over 4829.00 frames. ], tot_loss[loss=0.1614, simple_loss=0.2342, pruned_loss=0.04435, over 955467.34 frames. ], batch size: 33, lr: 2.78e-03, grad_scale: 64.0 +2023-04-28 06:44:42,776 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171257.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:44:53,810 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=171273.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:45:05,608 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.060e+02 1.555e+02 1.961e+02 2.367e+02 3.631e+02, threshold=3.922e+02, percent-clipped=1.0 +2023-04-28 06:45:07,338 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171284.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:45:35,546 INFO [finetune.py:976] (0/7) Epoch 30, batch 5200, loss[loss=0.2509, simple_loss=0.3143, pruned_loss=0.09372, over 4156.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2377, pruned_loss=0.04561, over 954124.39 frames. ], batch size: 65, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:46:31,229 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171345.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:46:36,594 INFO [finetune.py:976] (0/7) Epoch 30, batch 5250, loss[loss=0.1704, simple_loss=0.232, pruned_loss=0.05444, over 4231.00 frames. ], tot_loss[loss=0.1645, simple_loss=0.2386, pruned_loss=0.0452, over 955046.23 frames. ], batch size: 18, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:47:18,739 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.056e+02 1.554e+02 1.778e+02 2.227e+02 5.005e+02, threshold=3.556e+02, percent-clipped=1.0 +2023-04-28 06:47:43,263 INFO [finetune.py:976] (0/7) Epoch 30, batch 5300, loss[loss=0.1795, simple_loss=0.2578, pruned_loss=0.05066, over 4874.00 frames. ], tot_loss[loss=0.1654, simple_loss=0.2397, pruned_loss=0.04553, over 954627.35 frames. 
], batch size: 32, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:47:50,523 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171406.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:48:41,623 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171448.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:48:44,664 INFO [finetune.py:976] (0/7) Epoch 30, batch 5350, loss[loss=0.1904, simple_loss=0.2619, pruned_loss=0.05945, over 4817.00 frames. ], tot_loss[loss=0.1647, simple_loss=0.2396, pruned_loss=0.04496, over 956239.38 frames. ], batch size: 33, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:48:56,788 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.20 vs. limit=2.0 +2023-04-28 06:49:03,280 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.027e+02 1.512e+02 1.839e+02 2.201e+02 3.854e+02, threshold=3.679e+02, percent-clipped=1.0 +2023-04-28 06:49:13,273 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1162, 2.8492, 2.3251, 2.3103, 1.5723, 1.6496, 2.5604, 1.5460], + device='cuda:0'), covar=tensor([0.1567, 0.1371, 0.1194, 0.1416, 0.2107, 0.1671, 0.0737, 0.1809], + device='cuda:0'), in_proj_covar=tensor([0.0198, 0.0209, 0.0170, 0.0204, 0.0201, 0.0187, 0.0156, 0.0188], + device='cuda:0'), out_proj_covar=tensor([0.0003, 0.0003, 0.0002, 0.0003, 0.0003, 0.0003, 0.0002, 0.0003], + device='cuda:0') +2023-04-28 06:49:15,604 INFO [zipformer.py:1188] (0/7) warmup_begin=2666.7, warmup_end=3333.3, batch_count=171498.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:49:18,573 INFO [finetune.py:976] (0/7) Epoch 30, batch 5400, loss[loss=0.1375, simple_loss=0.2108, pruned_loss=0.03207, over 4817.00 frames. ], tot_loss[loss=0.1639, simple_loss=0.2383, pruned_loss=0.04473, over 956758.82 frames. ], batch size: 40, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:49:38,424 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1201, 1.9261, 2.1089, 2.5194, 2.6510, 2.0329, 1.8159, 2.2328], + device='cuda:0'), covar=tensor([0.0858, 0.1046, 0.0672, 0.0544, 0.0489, 0.0836, 0.0690, 0.0553], + device='cuda:0'), in_proj_covar=tensor([0.0181, 0.0201, 0.0182, 0.0171, 0.0176, 0.0178, 0.0149, 0.0176], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:49:51,786 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171552.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:49:52,349 INFO [finetune.py:976] (0/7) Epoch 30, batch 5450, loss[loss=0.1375, simple_loss=0.2097, pruned_loss=0.03265, over 3951.00 frames. ], tot_loss[loss=0.1619, simple_loss=0.2355, pruned_loss=0.04419, over 956914.41 frames. ], batch size: 17, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:49:56,149 INFO [zipformer.py:1188] (0/7) warmup_begin=3333.3, warmup_end=4000.0, batch_count=171559.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:49:58,027 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.24 vs. 
limit=2.0 +2023-04-28 06:50:10,997 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 9.901e+01 1.444e+02 1.753e+02 2.019e+02 4.846e+02, threshold=3.505e+02, percent-clipped=2.0 +2023-04-28 06:50:12,290 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171584.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:50:25,852 INFO [finetune.py:976] (0/7) Epoch 30, batch 5500, loss[loss=0.1083, simple_loss=0.1872, pruned_loss=0.01475, over 4734.00 frames. ], tot_loss[loss=0.1592, simple_loss=0.2323, pruned_loss=0.04308, over 958758.61 frames. ], batch size: 23, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:50:30,739 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.8477, 1.8424, 1.1888, 1.4913, 2.2078, 1.6558, 1.5504, 1.5737], + device='cuda:0'), covar=tensor([0.0461, 0.0332, 0.0278, 0.0509, 0.0236, 0.0450, 0.0445, 0.0517], + device='cuda:0'), in_proj_covar=tensor([0.0027, 0.0023, 0.0021, 0.0028, 0.0019, 0.0027, 0.0027, 0.0028], + device='cuda:0'), out_proj_covar=tensor([0.0052, 0.0046, 0.0039, 0.0053, 0.0039, 0.0051, 0.0051, 0.0053], + device='cuda:0') +2023-04-28 06:50:33,188 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.5571, 1.2462, 4.4460, 4.1947, 3.8653, 4.2630, 4.1569, 3.9669], + device='cuda:0'), covar=tensor([0.7413, 0.6067, 0.1117, 0.1817, 0.1202, 0.2059, 0.1228, 0.1620], + device='cuda:0'), in_proj_covar=tensor([0.0311, 0.0307, 0.0407, 0.0410, 0.0347, 0.0417, 0.0317, 0.0366], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0002, 0.0002, 0.0001, 0.0002, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:50:45,015 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=171632.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:50:57,980 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.1012, 1.4085, 1.3434, 1.6714, 1.4228, 1.7680, 1.2744, 3.3812], + device='cuda:0'), covar=tensor([0.0731, 0.1130, 0.0987, 0.1364, 0.0895, 0.0663, 0.1021, 0.0241], + device='cuda:0'), in_proj_covar=tensor([0.0037, 0.0039, 0.0039, 0.0043, 0.0040, 0.0038, 0.0038, 0.0055], + device='cuda:0'), out_proj_covar=tensor([0.0012, 0.0013, 0.0014, 0.0015, 0.0014, 0.0013, 0.0013, 0.0016], + device='cuda:0') +2023-04-28 06:50:59,647 INFO [finetune.py:976] (0/7) Epoch 30, batch 5550, loss[loss=0.1421, simple_loss=0.2119, pruned_loss=0.03614, over 4824.00 frames. ], tot_loss[loss=0.1612, simple_loss=0.234, pruned_loss=0.04417, over 957350.83 frames. 
], batch size: 25, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:51:08,821 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([2.1737, 1.4355, 1.7340, 1.7958, 1.7390, 1.8002, 1.7448, 1.7599], + device='cuda:0'), covar=tensor([0.3988, 0.4966, 0.4020, 0.4145, 0.5161, 0.6707, 0.4700, 0.4198], + device='cuda:0'), in_proj_covar=tensor([0.0343, 0.0375, 0.0331, 0.0345, 0.0352, 0.0394, 0.0362, 0.0335], + device='cuda:0'), out_proj_covar=tensor([0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002, 0.0002], + device='cuda:0') +2023-04-28 06:51:38,775 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.059e+02 1.632e+02 1.883e+02 2.258e+02 4.653e+02, threshold=3.766e+02, percent-clipped=3.0 +2023-04-28 06:51:40,139 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.6694, 1.9967, 1.7674, 1.9702, 1.4715, 1.8310, 1.7335, 1.4373], + device='cuda:0'), covar=tensor([0.1411, 0.0894, 0.0668, 0.0833, 0.2750, 0.0723, 0.1251, 0.1620], + device='cuda:0'), in_proj_covar=tensor([0.0282, 0.0299, 0.0217, 0.0273, 0.0308, 0.0255, 0.0248, 0.0260], + device='cuda:0'), out_proj_covar=tensor([1.1235e-04, 1.1752e-04, 8.4992e-05, 1.0687e-04, 1.2381e-04, 1.0007e-04, + 9.9552e-05, 1.0218e-04], device='cuda:0') +2023-04-28 06:52:00,699 INFO [zipformer.py:1188] (0/7) warmup_begin=1333.3, warmup_end=2000.0, batch_count=171701.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:52:01,823 INFO [finetune.py:976] (0/7) Epoch 30, batch 5600, loss[loss=0.1889, simple_loss=0.2685, pruned_loss=0.05465, over 4932.00 frames. ], tot_loss[loss=0.1629, simple_loss=0.2369, pruned_loss=0.04444, over 954896.85 frames. ], batch size: 33, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:52:34,446 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.26 vs. limit=2.0 +2023-04-28 06:52:55,572 INFO [zipformer.py:1188] (0/7) warmup_begin=2000.0, warmup_end=2666.7, batch_count=171748.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:52:58,362 INFO [finetune.py:976] (0/7) Epoch 30, batch 5650, loss[loss=0.1918, simple_loss=0.2663, pruned_loss=0.05858, over 4908.00 frames. ], tot_loss[loss=0.1644, simple_loss=0.2396, pruned_loss=0.04457, over 955776.76 frames. ], batch size: 36, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:53:36,441 INFO [zipformer.py:2441] (0/7) attn_weights_entropy = tensor([1.7961, 1.1478, 1.7139, 2.1372, 1.8663, 1.6978, 1.7370, 1.7292], + device='cuda:0'), covar=tensor([0.4319, 0.6323, 0.5934, 0.5458, 0.5797, 0.8101, 0.7615, 0.7199], + device='cuda:0'), in_proj_covar=tensor([0.0448, 0.0426, 0.0520, 0.0509, 0.0474, 0.0517, 0.0516, 0.0532], + device='cuda:0'), out_proj_covar=tensor([0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001, 0.0001], + device='cuda:0') +2023-04-28 06:53:37,491 INFO [optim.py:369] (0/7) Clipping_scale=2.0, grad-norm quartiles 1.071e+02 1.430e+02 1.735e+02 1.948e+02 3.086e+02, threshold=3.470e+02, percent-clipped=0.0 +2023-04-28 06:53:38,920 INFO [scaling.py:679] (0/7) Whitening: num_groups=8, num_channels=96, metric=1.68 vs. limit=2.0 +2023-04-28 06:53:50,969 INFO [zipformer.py:1188] (0/7) warmup_begin=666.7, warmup_end=1333.3, batch_count=171796.0, num_to_drop=0, layers_to_drop=set() +2023-04-28 06:54:00,717 INFO [finetune.py:976] (0/7) Epoch 30, batch 5700, loss[loss=0.1514, simple_loss=0.2026, pruned_loss=0.05008, over 4202.00 frames. ], tot_loss[loss=0.1624, simple_loss=0.2361, pruned_loss=0.04433, over 938412.46 frames. 
], batch size: 18, lr: 2.78e-03, grad_scale: 32.0 +2023-04-28 06:54:33,358 INFO [checkpoint.py:75] (0/7) Saving checkpoint to pruned_transducer_stateless7_streaming/exp2/epoch-30.pt +2023-04-28 06:54:36,951 INFO [finetune.py:1241] (0/7) Done!
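A few of the recurring diagnostics in this log repay unpacking. The sketches below are hedged reconstructions inferred from the logged values themselves; all names are illustrative and none of them is the project's actual code.

First, the per-batch "loss[...]" and running "tot_loss[...]" entries each report a combined transducer loss alongside its two components. The printed numbers are consistent with the total being 0.5 * simple_loss + pruned_loss (for example, 0.5 * 0.2318 + 0.04335 = 0.1592 at epoch 30, batch 3200, and 0.5 * 0.2093 + 0.02908 = 0.1337 at batch 3250):

    def combined_loss(simple_loss: float, pruned_loss: float,
                      simple_loss_scale: float = 0.5) -> float:
        # The 0.5 weight is inferred from the printed values, not taken
        # from finetune.py; treat it as an assumption.
        return simple_loss_scale * simple_loss + pruned_loss

    assert abs(combined_loss(0.2318, 0.04335) - 0.1592) < 1e-3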
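The "over N frames" counts attached to tot_loss hover near 950k and are fractional (e.g. 952251.35 frames), which suggests a decayed, frame-weighted running aggregate rather than a plain sum. A minimal sketch, assuming exponential decay (the decay constant is made up for illustration):

    class RunningLoss:
        def __init__(self, decay: float = 0.995):
            self.decay = decay     # illustrative; the real constant is unknown
            self.loss_sum = 0.0    # decayed sum of loss * frames
            self.frames = 0.0      # decayed frame count

        def update(self, batch_loss: float, batch_frames: float) -> float:
            self.loss_sum = self.decay * self.loss_sum + batch_loss * batch_frames
            self.frames = self.decay * self.frames + batch_frames
            return self.loss_sum / self.frames   # the reported tot_loss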
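In every "Clipping_scale=2.0, grad-norm quartiles ..." line from optim.py, the reported threshold equals Clipping_scale times the middle quartile, e.g. 2.0 * 1.739e+02 = 3.478e+02 and 2.0 * 1.939e+02 = 3.878e+02, so the clipping threshold is evidently derived from the median of recent gradient norms. A runnable sketch of that idea (the window size, class name, and exact history mechanism are assumptions):

    import torch

    class QuartileClipper:
        def __init__(self, clipping_scale: float = 2.0, window: int = 1000):
            self.clipping_scale = clipping_scale
            self.window = window
            self.norms = []   # recent total gradient norms

        def clip_(self, parameters):
            grads = [p.grad for p in parameters if p.grad is not None]
            total = torch.norm(torch.stack([g.norm() for g in grads]))
            self.norms = (self.norms + [float(total)])[-self.window:]
            hist = torch.tensor(self.norms)
            # min / 25% / median / 75% / max, as printed in the log
            quartiles = hist.quantile(torch.tensor([0.0, 0.25, 0.5, 0.75, 1.0]))
            threshold = self.clipping_scale * quartiles[2]   # scale * median
            if total > threshold:             # "percent-clipped" would count
                for g in grads:               # how often this branch fires
                    g.mul_(threshold / total)
            return quartiles, threshold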
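The "attn_weights_entropy = tensor([...])" dumps from zipformer.py list eight values per row, consistent with one number per attention head: plausibly the Shannon entropy of each head's attention distribution, averaged over query positions (low entropy means peaked attention, high means diffuse). A sketch under that assumption:

    import torch

    def attn_weights_entropy(attn_weights: torch.Tensor) -> torch.Tensor:
        # attn_weights: (num_heads, num_queries, num_keys); each row is a
        # probability distribution over keys. The exact reduction used in
        # zipformer.py is assumed here, not known.
        p = attn_weights.clamp(min=1.0e-20)
        entropy = -(p * p.log()).sum(dim=-1)   # (num_heads, num_queries)
        return entropy.mean(dim=-1)            # one value per head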
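The "Whitening: num_groups=..., num_channels=..., metric=... vs. limit=..." lines from scaling.py compare a measured statistic against a fixed limit. A natural reading is that the metric measures how far each channel group's feature covariance is from a multiple of the identity: 1.0 for perfectly white features, growing with the eigenvalue spread. One scale-invariant statistic with exactly that behaviour is mean(eig^2) / mean(eig)^2, sketched below (a paraphrase of the idea, not the exact scaling.py code):

    import torch

    def whitening_metric(x: torch.Tensor, num_groups: int) -> torch.Tensor:
        # x: (num_frames, num_channels) activations; channels are split into
        # num_groups contiguous groups (the grouping scheme is an assumption).
        num_frames, num_channels = x.shape
        assert num_channels % num_groups == 0
        x = x.reshape(num_frames, num_groups, num_channels // num_groups)
        x = x.transpose(0, 1)                        # (groups, frames, chans)
        covar = torch.matmul(x.transpose(1, 2), x)   # per-group covariance
        mean_diag = covar.diagonal(dim1=1, dim2=2).mean()
        mean_sq_diag = torch.matmul(covar, covar).diagonal(dim1=1, dim2=2).mean()
        # ~ mean(eig^2) / mean(eig)^2; equals 1.0 when the covariance is
        # proportional to the identity, and grows with eigenvalue spread
        return mean_sq_diag / (mean_diag ** 2 + 1.0e-20)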
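The zipformer.py "warmup_begin=..., warmup_end=..., batch_count=..., num_to_drop=..., layers_to_drop={...}" lines record stochastic layer skipping: different layers carry different warmup intervals (666.7-1333.3, 1333.3-2000.0, and so on), and even at batch_count near 169-170k the log still shows the occasional num_to_drop=1 or 2, implying a small residual drop probability long after warmup. A sketch under those assumptions (the probabilities and the annealing rule are invented for illustration):

    import random

    def layers_to_drop(batch_count, num_layers, warmup_begin, warmup_end,
                       base_prob=0.075):
        dropped = set()
        for layer in range(num_layers):
            if batch_count < warmup_end:
                # anneal from an aggressive rate at warmup_begin down to
                # base_prob at warmup_end (both rates are illustrative)
                frac = max(0.0, min(1.0, (warmup_end - batch_count)
                                    / max(warmup_end - warmup_begin, 1.0)))
                prob = base_prob + frac * (0.5 - base_prob)
            else:
                prob = base_prob   # residual skipping, as seen late in training
            if random.random() < prob:
                dropped.add(layer)
        return dropped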
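Finally, the two checkpoint lines show both flavours of saving: a batch-count checkpoint (checkpoint-170000.pt) taken mid-epoch at a round batch count, and the end-of-epoch epoch-30.pt written just before "Done!". A minimal sketch of the periodic variant; the real checkpoint.py almost certainly saves more state (sampler, grad scaler, scheduler) than shown:

    import torch

    def maybe_save_checkpoint(model, optimizer, batch_idx, save_every_n, exp_dir):
        # Hypothetical helper mirroring "Saving checkpoint to
        # .../checkpoint-170000.pt"; the payload and signature are assumptions.
        if batch_idx % save_every_n == 0:
            torch.save(
                {"model": model.state_dict(),
                 "optimizer": optimizer.state_dict(),
                 "batch_idx": batch_idx},
                f"{exp_dir}/checkpoint-{batch_idx}.pt")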