optimizer_70e.yml

epoch: 70

LearningRate:
  base_lr: 0.000125
  schedulers:
  - !PiecewiseDecay
    gamma: 0.1
    milestones: [60]
    use_warmup: False

OptimizerBuilder:
  optimizer:
    type: Adam
  regularizer: NULL
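
The config above trains for 70 epochs with Adam, no weight-decay regularizer, and a piecewise learning-rate decay: the rate holds at base_lr 0.000125 until epoch 60, then is multiplied by gamma 0.1 for the remaining epochs, with no warmup. Below is a minimal Python sketch of that schedule's semantics for illustration only; it is not PaddleDetection's actual PiecewiseDecay implementation, and the function name is hypothetical.

# Illustrative sketch of the piecewise-decay schedule configured above
# (hypothetical helper, not PaddleDetection's scheduler code).
def piecewise_lr(epoch, base_lr=0.000125, gamma=0.1, milestones=(60,)):
    """Return the learning rate used at a given epoch."""
    lr = base_lr
    for m in milestones:
        if epoch >= m:
            lr *= gamma  # decay by gamma at each milestone already passed
    return lr

# Epochs 0-59 train at 1.25e-4; epochs 60-69 train at 1.25e-5.
assert piecewise_lr(0) == 0.000125
assert abs(piecewise_lr(60) - 1.25e-5) < 1e-12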