diff --git a/configs/pascal_voc/ssd300_voc.py b/configs/pascal_voc/ssd300_voc.py
index b98d36e70be1271b223152c0460be2972d906c7d..79d60e95c740636610d266a681ba4f48d6b2c66b 100644
--- a/configs/pascal_voc/ssd300_voc.py
+++ b/configs/pascal_voc/ssd300_voc.py
@@ -51,7 +51,7 @@ data = dict(
     workers_per_gpu=2,
     train=dict(
         type='RepeatDataset',
-        times=20,
+        times=10,
         dataset=dict(
             type=dataset_type,
             ann_file=[
@@ -113,7 +113,7 @@ lr_config = dict(
     warmup='linear',
     warmup_iters=500,
     warmup_ratio=1.0 / 3,
-    step=[8, 11])
+    step=[16, 20])
 checkpoint_config = dict(interval=1)
 # yapf:disable
 log_config = dict(
@@ -124,7 +124,7 @@ log_config = dict(
     ])
 # yapf:enable
 # runtime settings
-total_epochs = 12
+total_epochs = 24
 dist_params = dict(backend='nccl')
 log_level = 'INFO'
 work_dir = './work_dirs/ssd300_voc'
diff --git a/configs/pascal_voc/ssd512_voc.py b/configs/pascal_voc/ssd512_voc.py
index daae45c5cff565c51ec413d849e34533094e7cdb..25e1369b9e84edb1cea7c77bcce1aa12128baa95 100644
--- a/configs/pascal_voc/ssd512_voc.py
+++ b/configs/pascal_voc/ssd512_voc.py
@@ -51,7 +51,7 @@ data = dict(
     workers_per_gpu=2,
     train=dict(
         type='RepeatDataset',
-        times=20,
+        times=10,
         dataset=dict(
             type=dataset_type,
             ann_file=[
@@ -113,7 +113,7 @@ lr_config = dict(
     warmup='linear',
     warmup_iters=500,
     warmup_ratio=1.0 / 3,
-    step=[8, 11])
+    step=[16, 20])
 checkpoint_config = dict(interval=1)
 # yapf:disable
 log_config = dict(
@@ -124,7 +124,7 @@ log_config = dict(
     ])
 # yapf:enable
 # runtime settings
-total_epochs = 12
+total_epochs = 24
 dist_params = dict(backend='nccl')
 log_level = 'INFO'
 work_dir = './work_dirs/ssd512_voc'
diff --git a/configs/ssd300_coco.py b/configs/ssd300_coco.py
index 575af5871eae8ad1dbaaef14cd02d04e0579a0ec..781d2df0b927791d16f30753a1cfabd6011e5963 100644
--- a/configs/ssd300_coco.py
+++ b/configs/ssd300_coco.py
@@ -51,7 +51,7 @@ data = dict(
     workers_per_gpu=3,
     train=dict(
         type='RepeatDataset',
-        times=10,
+        times=5,
         dataset=dict(
             type=dataset_type,
             ann_file=data_root + 'annotations/instances_train2017.json',
@@ -110,7 +110,7 @@ lr_config = dict(
     warmup='linear',
     warmup_iters=500,
     warmup_ratio=1.0 / 3,
-    step=[8, 11])
+    step=[16, 22])
 checkpoint_config = dict(interval=1)
 # yapf:disable
 log_config = dict(
@@ -121,7 +121,7 @@ log_config = dict(
     ])
 # yapf:enable
 # runtime settings
-total_epochs = 12
+total_epochs = 24
 dist_params = dict(backend='nccl')
 log_level = 'INFO'
 work_dir = './work_dirs/ssd300_coco'
diff --git a/configs/ssd512_coco.py b/configs/ssd512_coco.py
index db1152abb24e55a4458aab01e6eb31dd1f008794..1d9b35299eafc8d06b4c70a4ab27b4fe2a981daf 100644
--- a/configs/ssd512_coco.py
+++ b/configs/ssd512_coco.py
@@ -51,7 +51,7 @@ data = dict(
     workers_per_gpu=3,
     train=dict(
         type='RepeatDataset',
-        times=10,
+        times=5,
         dataset=dict(
             type=dataset_type,
             ann_file=data_root + 'annotations/instances_train2017.json',
@@ -110,7 +110,7 @@ lr_config = dict(
     warmup='linear',
     warmup_iters=500,
     warmup_ratio=1.0 / 3,
-    step=[8, 11])
+    step=[16, 22])
 checkpoint_config = dict(interval=1)
 # yapf:disable
 log_config = dict(
@@ -121,7 +121,7 @@ log_config = dict(
     ])
 # yapf:enable
 # runtime settings
-total_epochs = 12
+total_epochs = 24
 dist_params = dict(backend='nccl')
 log_level = 'INFO'
 work_dir = './work_dirs/ssd512_coco'
diff --git a/mmdet/core/anchor/anchor_generator.py b/mmdet/core/anchor/anchor_generator.py
index 84600be331e52d9a64f70e2cb43696b82801bf0e..8995ea61d75ad38fa747e63deafd4b048a410d76 100644
--- a/mmdet/core/anchor/anchor_generator.py
+++ b/mmdet/core/anchor/anchor_generator.py
@@ -3,11 +3,12 @@ import torch
 
 class AnchorGenerator(object):
 
-    def __init__(self, base_size, scales, ratios, scale_major=True):
+    def __init__(self, base_size, scales, ratios, scale_major=True, ctr=None):
         self.base_size = base_size
         self.scales = torch.Tensor(scales)
         self.ratios = torch.Tensor(ratios)
         self.scale_major = scale_major
+        self.ctr = ctr
         self.base_anchors = self.gen_base_anchors()
 
     @property
@@ -15,13 +16,13 @@ class AnchorGenerator(object):
         return self.base_anchors.size(0)
 
     def gen_base_anchors(self):
-        base_anchor = torch.Tensor(
-            [0, 0, self.base_size - 1, self.base_size - 1])
-
-        w = base_anchor[2] - base_anchor[0] + 1
-        h = base_anchor[3] - base_anchor[1] + 1
-        x_ctr = base_anchor[0] + 0.5 * (w - 1)
-        y_ctr = base_anchor[1] + 0.5 * (h - 1)
+        w = self.base_size
+        h = self.base_size
+        if self.ctr is None:
+            x_ctr = 0.5 * (w - 1)
+            y_ctr = 0.5 * (h - 1)
+        else:
+            x_ctr, y_ctr = self.ctr
 
         h_ratios = torch.sqrt(self.ratios)
         w_ratios = 1 / h_ratios
diff --git a/mmdet/models/single_stage_heads/ssd_head.py b/mmdet/models/single_stage_heads/ssd_head.py
index 20aaaab83f25ced9eca796f372166086522cc2bc..ad3e78a399f93b289b338b77b04389b8a1bbcc0d 100644
--- a/mmdet/models/single_stage_heads/ssd_head.py
+++ b/mmdet/models/single_stage_heads/ssd_head.py
@@ -72,12 +72,14 @@ class SSDHead(nn.Module):
         self.anchor_strides = anchor_strides
         for k in range(len(anchor_strides)):
             base_size = min_sizes[k]
+            stride = anchor_strides[k]
+            ctr = ((stride - 1) / 2., (stride - 1) / 2.)
             scales = [1., np.sqrt(max_sizes[k] / min_sizes[k])]
             ratios = [1.]
             for r in anchor_ratios[k]:
                 ratios += [1 / r, r]  # 4 or 6 ratio anchor
             anchor_generator = AnchorGenerator(
-                base_size, scales, ratios, scale_major=False)
+                base_size, scales, ratios, scale_major=False, ctr=ctr)
             indices = list(range(len(ratios)))
             indices.insert(1, len(indices))
             anchor_generator.base_anchors = torch.index_select(
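
The effect of the new `ctr` argument (set by `SSDHead` to `((stride - 1) / 2., (stride - 1) / 2.)`) is easiest to see in isolation. The sketch below is not part of the patch: `gen_base_anchor` is a hypothetical helper that mirrors only the patched centring logic for a single scale=1, ratio=1 anchor, with `base_size=30` and `stride=8` chosen purely as example values. It shows that the old code always centred a base anchor on its own extent, while the new code lets the head centre it on the feature-map cell instead.

```python
import torch


def gen_base_anchor(base_size, ctr=None):
    """Single square (scale=1, ratio=1) base anchor, mirroring the patched logic."""
    w = h = base_size
    if ctr is None:
        # Old behaviour: centre derived from the anchor's own size.
        x_ctr = 0.5 * (w - 1)
        y_ctr = 0.5 * (h - 1)
    else:
        # New behaviour: centre supplied by the caller (e.g. the cell centre).
        x_ctr, y_ctr = ctr
    return torch.tensor([
        x_ctr - 0.5 * (w - 1), y_ctr - 0.5 * (h - 1),
        x_ctr + 0.5 * (w - 1), y_ctr + 0.5 * (h - 1)
    ])


base_size, stride = 30, 8  # example values for illustration only
print(gen_base_anchor(base_size))
# [0., 0., 29., 29.]    -> centred at (14.5, 14.5)
print(gen_base_anchor(base_size, ctr=((stride - 1) / 2., (stride - 1) / 2.)))
# [-11., -11., 18., 18.] -> centred at (3.5, 3.5), the centre of the first cell
```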
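
On the config side, each schedule edit is a matched pair: `RepeatDataset` `times` is halved while `total_epochs` doubles, so the total number of passes over the raw annotations is unchanged. What changes is that checkpoints (`interval=1`, i.e. per epoch) land twice as often, and the LR decay steps are moved onto the new epoch scale ([8, 11] of 12 becomes [16, 20] of 24 for VOC and [16, 22] of 24 for COCO). A throwaway arithmetic check, using only numbers that appear in the diff above:

```python
# Not part of the patch: quick check that the schedule edits keep the
# effective training length the same.
schedules = {
    'VOC  old': dict(times=20, total_epochs=12, step=[8, 11]),
    'VOC  new': dict(times=10, total_epochs=24, step=[16, 20]),
    'COCO old': dict(times=10, total_epochs=12, step=[8, 11]),
    'COCO new': dict(times=5, total_epochs=24, step=[16, 22]),
}
for name, s in schedules.items():
    passes = s['times'] * s['total_epochs']
    print(f"{name}: {passes} dataset passes, "
          f"LR decay at epochs {s['step']} of {s['total_epochs']}")
# VOC: 240 passes before and after; COCO: 120 passes before and after.
```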