epoch: 650

LearningRate:
  base_lr: 0.005
  schedulers:
  - !PiecewiseDecay
    gamma: 0.1
    milestones:
    - 430
    - 540
    - 610
  - !LinearWarmup
    start_factor: 0.
    steps: 4000

OptimizerBuilder:
  optimizer:
    momentum: 0.9
    type: Momentum
  regularizer:
    factor: 0.0005
    type: L2
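A minimal sketch (not part of the config) of how the schedule above evaluates, assuming the usual PaddleDetection convention that LinearWarmup counts iterations while PiecewiseDecay milestones count epochs; the function name and signature are illustrative only.

def learning_rate(step, epoch,
                  base_lr=0.005,
                  warmup_steps=4000, start_factor=0.0,
                  gamma=0.1, milestones=(430, 540, 610)):
    """Return the learning rate for a given global step and current epoch."""
    # Piecewise decay: multiply base_lr by gamma once per milestone already passed.
    decayed = base_lr * gamma ** sum(epoch >= m for m in milestones)
    # Linear warmup: ramp from start_factor * lr up to lr over the first warmup_steps iterations.
    if step < warmup_steps:
        alpha = step / warmup_steps
        return decayed * (start_factor * (1 - alpha) + alpha)
    return decayed

# Examples:
#   learning_rate(step=2000, epoch=3)      -> 0.0025  (halfway through warmup)
#   learning_rate(step=10**6, epoch=550)   -> 5e-05   (after the 430- and 540-epoch decays)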