# optimizer_70e.yml — optimizer / LR schedule config (PaddleDetection-style).
# Reconstructed from a scraped paste: line-number prefixes and fused header
# removed, structural indentation restored per the consumer's schema.
---
# Total number of training epochs.
epoch: 70

LearningRate:
  # Initial learning rate before any decay is applied.
  base_lr: 0.05
  schedulers:
    # !PiecewiseDecay is a framework-defined tag (constructed by the training
    # framework's loader, not by generic safe YAML loaders).
  - !PiecewiseDecay
    # Epochs at which the LR is multiplied by the corresponding gamma.
    milestones: [48, 60]
    gamma: [0.1, 0.1]
    # NOTE(review): use_warmup placed under the scheduler per its position in
    # the source; confirm against the consuming scheduler's signature.
    use_warmup: false

OptimizerBuilder:
  optimizer:
    momentum: 0.9
    type: Momentum
  regularizer:
    # L2 weight-decay factor.
    factor: 0.0005
    type: L2