# Optimizer settings: Adam with a flat base learning rate, no gradient clipping.
optimizer = {'type': 'Adam', 'lr': 1e-4}
optimizer_config = {'grad_clip': None}
# Learning-rate schedule: step decay at epochs 200 and 400 over a 600-epoch run.
lr_config = {'policy': 'step', 'step': [200, 400]}
total_epochs = 600