# optimizer_swin_1x.yml
# Optimizer / LR-schedule config for Swin Transformer, 1x (12-epoch) schedule.
# NOTE(review): `!PiecewiseDecay` and `!LinearWarmup` are application-defined
# YAML tags resolved by the consuming framework's config loader — presumably
# PaddleDetection; confirm against the loader before renaming.
---
epoch: 12

LearningRate:
  base_lr: 0.0001
  schedulers:
    # Step decay: multiply LR by gamma at each milestone epoch.
    - !PiecewiseDecay
      gamma: 0.1
      milestones: [8, 11]
    # Linear warmup from start_factor * base_lr over the first `steps` iters.
    - !LinearWarmup
      start_factor: 0.1
      steps: 1000

OptimizerBuilder:
  clip_grad_by_norm: 1.0
  optimizer:
    type: AdamW
    weight_decay: 0.05
    param_groups:
      # Disable weight decay for position embeddings and normalization
      # parameters (standard practice when training Swin Transformers).
      - params: ['absolute_pos_embed', 'relative_position_bias_table', 'norm']
        weight_decay: 0.0