# mask-rcnn_swin-t-p4-w7_fpn_1x_coco.py
# Mask R-CNN with a Swin-Tiny backbone (patch 4, window 7), FPN neck,
# 1x (12-epoch) COCO instance-segmentation schedule.
  1. _base_ = [
  2. '../_base_/models/mask-rcnn_r50_fpn.py',
  3. '../_base_/datasets/coco_instance.py',
  4. '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
  5. ]
  6. pretrained = 'https://github.com/SwinTransformer/storage/releases/download/v1.0.0/swin_tiny_patch4_window7_224.pth' # noqa
  7. model = dict(
  8. type='MaskRCNN',
  9. backbone=dict(
  10. _delete_=True,
  11. type='SwinTransformer',
  12. embed_dims=96,
  13. depths=[2, 2, 6, 2],
  14. num_heads=[3, 6, 12, 24],
  15. window_size=7,
  16. mlp_ratio=4,
  17. qkv_bias=True,
  18. qk_scale=None,
  19. drop_rate=0.,
  20. attn_drop_rate=0.,
  21. drop_path_rate=0.2,
  22. patch_norm=True,
  23. out_indices=(0, 1, 2, 3),
  24. with_cp=False,
  25. convert_weights=True,
  26. init_cfg=dict(type='Pretrained', checkpoint=pretrained)),
  27. neck=dict(in_channels=[96, 192, 384, 768]))
  28. max_epochs = 12
  29. train_cfg = dict(max_epochs=max_epochs)
  30. # learning rate
  31. param_scheduler = [
  32. dict(
  33. type='LinearLR', start_factor=0.001, by_epoch=False, begin=0,
  34. end=1000),
  35. dict(
  36. type='MultiStepLR',
  37. begin=0,
  38. end=max_epochs,
  39. by_epoch=True,
  40. milestones=[8, 11],
  41. gamma=0.1)
  42. ]
  43. # optimizer
  44. optim_wrapper = dict(
  45. type='OptimWrapper',
  46. paramwise_cfg=dict(
  47. custom_keys={
  48. 'absolute_pos_embed': dict(decay_mult=0.),
  49. 'relative_position_bias_table': dict(decay_mult=0.),
  50. 'norm': dict(decay_mult=0.)
  51. }),
  52. optimizer=dict(
  53. _delete_=True,
  54. type='AdamW',
  55. lr=0.0001,
  56. betas=(0.9, 0.999),
  57. weight_decay=0.05))