# optimizer_140e.yml — 140-epoch optimizer schedule (piecewise LR decay + Adam)
---
  1. epoch: 140
  2. LearningRate:
  3. base_lr: 0.0005
  4. schedulers:
  5. - !PiecewiseDecay
  6. gamma: 0.1
  7. milestones: [90, 120]
  8. use_warmup: False
  9. OptimizerBuilder:
  10. optimizer:
  11. type: Adam
  12. regularizer: NULL