optimizer_30e.yml

epoch: 30

LearningRate:
  base_lr: 0.01
  schedulers:
  - !PiecewiseDecay
    gamma: 0.1
    milestones: [15, 22]
    use_warmup: True
  - !ExpWarmup
    steps: 1000
    power: 4

OptimizerBuilder:
  optimizer:
    momentum: 0.9
    type: Momentum
  regularizer:
    factor: 0.0001
    type: L2
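
Read together, this config trains for 30 epochs with SGD momentum (0.9) and L2 weight decay (factor 0.0001). The learning rate starts from base_lr 0.01, is ramped up over the first 1000 iterations by the exponential warmup (scaling factor (step / 1000) ** 4), and is then multiplied by gamma = 0.1 at epochs 15 and 22. The sketch below is a minimal, standalone approximation of that schedule, assuming the usual formulas for these two schedulers; it is not the framework's actual implementation, and steps_per_epoch is a hypothetical value that in practice comes from your dataset size and batch size.

def learning_rate(global_step, steps_per_epoch,
                  base_lr=0.01, gamma=0.1, milestones=(15, 22),
                  warmup_steps=1000, warmup_power=4):
    """Approximate LR at a given global iteration under this config (assumed formulas)."""
    epoch = global_step // steps_per_epoch
    # Piecewise decay: apply one factor of gamma per milestone epoch already passed.
    lr = base_lr * gamma ** sum(epoch >= m for m in milestones)
    # Exponential warmup: scale by (step / warmup_steps) ** power for the first warmup_steps iterations.
    if global_step < warmup_steps:
        lr *= (global_step / warmup_steps) ** warmup_power
    return lr

if __name__ == "__main__":
    steps_per_epoch = 500  # hypothetical value for illustration only
    for step in (0, 250, 1000, 15 * 500, 22 * 500, 29 * 500):
        print(f"step {step:6d} (epoch {step // steps_per_epoch:2d}): "
              f"lr = {learning_rate(step, steps_per_epoch):.6f}")

With these example numbers the printout shows the LR climbing from 0 toward 0.01 during the first 1000 iterations, holding at 0.01 until epoch 15, dropping to 0.001 there, and to 0.0001 at epoch 22 for the remainder of training.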