CosyVoice committed
Commit 793a248
1 Parent(s): 6a3e442

add constant lr scheduler

cosyvoice/utils/scheduler.py CHANGED
@@ -715,3 +715,25 @@ class NoamHoldAnnealing(WarmupHoldPolicy):
 
     def set_step(self, step: int):
         self.last_epoch = step
+
+
+class ConstantLR(_LRScheduler):
+    """The ConstantLR scheduler.
+
+    This scheduler keeps the learning rate constant.
+    """
+
+    def __init__(
+        self,
+        optimizer: torch.optim.Optimizer,
+    ):
+        # super().__init__() must be invoked before setting any fields,
+        # because step() is also invoked inside __init__().
+        super().__init__(optimizer)
+
+    def get_lr(self):
+        # A constant schedule: return the optimizer's base lrs unchanged.
+        return self.base_lrs
+
+    def set_step(self, step: int):
+        self.last_epoch = step
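A minimal sketch of how the new scheduler behaves in a training loop; the tiny model, the Adam optimizer, and the lr value below are illustrative assumptions, not part of the commit:

import torch
from cosyvoice.utils.scheduler import ConstantLR

# Illustrative setup: a one-layer model with lr=1e-5 (values are assumptions).
model = torch.nn.Linear(4, 4)
optimizer = torch.optim.Adam(model.parameters(), lr=1e-5)
scheduler = ConstantLR(optimizer)

for step in range(3):
    optimizer.step()
    scheduler.step()
    # get_lr() returns base_lrs unchanged, so the lr stays at 1e-5 on every step.
    print(scheduler.get_last_lr())  # [1e-05]

Because get_lr() simply returns base_lrs, step() never changes the learning rate, which is what the sft comments in the yaml change below rely on.
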
cosyvoice/utils/train_utils.py CHANGED
@@ -34,7 +34,7 @@ from torch.nn.utils import clip_grad_norm_
 from deepspeed.runtime.zero.stage_1_and_2 import estimate_zero2_model_states_mem_needs_all_live
 
 from cosyvoice.dataset.dataset import Dataset
-from cosyvoice.utils.scheduler import WarmupLR, NoamHoldAnnealing
+from cosyvoice.utils.scheduler import WarmupLR, NoamHoldAnnealing, ConstantLR
 
 
 def init_distributed(args):
@@ -122,6 +122,9 @@ def init_optimizer_and_scheduler(args, configs, model):
     elif configs['train_conf']['scheduler'] == 'NoamHoldAnnealing':
         scheduler_type = NoamHoldAnnealing
         scheduler = NoamHoldAnnealing(optimizer, **configs['train_conf']['scheduler_conf'])
+    elif configs['train_conf']['scheduler'] == 'constantlr':
+        scheduler_type = ConstantLR
+        scheduler = ConstantLR(optimizer)
     else:
         raise ValueError("unknown scheduler: " + configs['train_conf']['scheduler'])
 
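Note that, unlike the WarmupLR and NoamHoldAnnealing branches, the new branch instantiates ConstantLR(optimizer) without forwarding scheduler_conf, so a constant schedule needs no tuning keys. A sketch of a config dict that would reach this branch; the key layout follows the diffs above, the values are illustrative:

# Illustrative config fragment for init_optimizer_and_scheduler;
# key names follow the diffs above, values are assumptions.
configs = {
    'train_conf': {
        'optim': 'adam',
        'optim_conf': {'lr': 1e-5},
        'scheduler': 'constantlr',  # selects the new ConstantLR branch
        'scheduler_conf': {},       # ignored: ConstantLR takes only the optimizer
    }
}
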
examples/libritts/cosyvoice/conf/cosyvoice.yaml CHANGED
@@ -186,8 +186,8 @@ data_pipeline: [
 train_conf:
     optim: adam
     optim_conf:
-        lr: 0.001
-    scheduler: warmuplr
+        lr: 0.001 # change to 1e-5 during sft
+    scheduler: warmuplr # change to constantlr during sft
     scheduler_conf:
         warmup_steps: 2500
     max_epoch: 200
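Following the two new inline comments, an sft run would flip both values. A hypothetical sft variant of this block; everything except lr and scheduler is unchanged from the file above:

train_conf:
    optim: adam
    optim_conf:
        lr: 1e-5            # lowered for sft, per the inline comment
    scheduler: constantlr   # constant schedule for sft, per the inline comment
    scheduler_conf:
        warmup_steps: 2500  # present but unused by constantlr
    max_epoch: 200
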
tools/extract_embedding.py CHANGED
@@ -54,7 +54,7 @@ def main(args):
             spk2embedding[spk] = []
         spk2embedding[spk].append(embedding)
     for k, v in spk2embedding.items():
-        spk2embedding[k] = torch.tensor(v).mean(dim=0, keepdim=True).tolist()
+        spk2embedding[k] = torch.tensor(v).mean(dim=0).tolist()
 
     torch.save(utt2embedding, '{}/utt2embedding.pt'.format(args.dir))
     torch.save(spk2embedding, '{}/spk2embedding.pt'.format(args.dir))
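For context on the keepdim change: with keepdim=True the per-speaker mean kept a leading singleton dimension, so spk2embedding values were nested one level deeper than utt2embedding values; dropping it stores a flat vector. A small sketch of the difference (4-dim embeddings for brevity; the real embedding size is larger):

import torch

# Two illustrative utterance embeddings for one speaker.
v = [[1.0, 2.0, 3.0, 4.0],
     [3.0, 4.0, 5.0, 6.0]]

old = torch.tensor(v).mean(dim=0, keepdim=True).tolist()  # [[2.0, 3.0, 4.0, 5.0]]: extra nesting
new = torch.tensor(v).mean(dim=0).tolist()                # [2.0, 3.0, 4.0, 5.0]: flat vector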