# optimizer_120e.yml
# Optimizer / learning-rate schedule config for a 120-epoch training run.
# NOTE(review): the `!PiecewiseDecay` tag is a custom constructor resolved by
# the consuming framework's YAML loader (PaddleDetection-style) — confirm the
# loader registers it before parsing.

# Total number of training epochs.
epoch: 120

LearningRate:
  # Initial learning rate before any scheduler is applied.
  base_lr: 0.001
  schedulers:
    - !PiecewiseDecay
      # Epochs at which the LR is scaled; gamma[i] is the multiplicative
      # factor applied at milestones[i] (list form — one factor per step).
      milestones: [40, 60, 80, 100]
      gamma: [0.5, 0.5, 0.4, 0.1]
      # No warmup phase for this schedule.
      use_warmup: false

OptimizerBuilder:
  optimizer:
    # momentum 0.0 disables the momentum term for RMSProp.
    momentum: 0.0
    type: RMSProp
  regularizer:
    # L2 weight-decay coefficient.
    factor: 0.00005
    type: L2