
Commit 2f4fb6b

Tiling Module (#40)
* update tiling
* update det stage
* update
* remove redundant configure hyperparams
* remove configure_hyperparams
* update
* update
* update evaluation IoU to 0.5:0.95:0.05
* update
* update inferrer
* update inferrer
* add todo to exporter
* adapt detection inferrer for tiling
* fix bug
* revert unnecessary changes
* fix typo and add minor change
1 parent bc04c98 commit 2f4fb6b

10 files changed

Lines changed: 58 additions & 87 deletions


mpa/cls/inferrer.py

Lines changed: 9 additions & 9 deletions
@@ -100,17 +100,17 @@ def _infer(self, cfg, dump_features=False, dump_saliency_map=False):
             outputs = data_infos
         else:
             with FeatureVectorHook(model.module.backbone) if dump_features else nullcontext() as fhook:
-                with SaliencyMapHook(model.module.backbone) if dump_saliency_map else nullcontext() as shook:
-                    for data in data_loader:
-                        with torch.no_grad():
-                            result = model(return_loss=False, **data)
-                        eval_predictions.extend(result)
-                feature_vectors = fhook.records if dump_features else [None] * len(self.dataset)
-                saliency_maps = shook.records if dump_saliency_map else [None] * len(self.dataset)
+                with SaliencyMapHook(model.module.backbone) if dump_saliency_map else nullcontext() as shook:
+                    for data in data_loader:
+                        with torch.no_grad():
+                            result = model(return_loss=False, **data)
+                        eval_predictions.extend(result)
+                feature_vectors = fhook.records if dump_features else [None] * len(self.dataset)
+                saliency_maps = shook.records if dump_saliency_map else [None] * len(self.dataset)

         assert len(eval_predictions) == len(feature_vectors) == len(saliency_maps), \
-            'Number of elements should be the same, however, number of outputs are ' \
-            f"{len(eval_predictions)}, {len(feature_vectors)}, and {len(saliency_maps)}"
+            'Number of elements should be the same, however, number of outputs are ' \
+            f"{len(eval_predictions)}, {len(feature_vectors)}, and {len(saliency_maps)}"
         outputs = dict(
             eval_predictions=eval_predictions,
             feature_vectors=feature_vectors,
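A note on the pattern in this hunk: `FeatureVectorHook(...) if dump_features else nullcontext()` keeps a single `with` block while making the hook optional. Below is a minimal, self-contained sketch of that conditional-context-manager idea; `RecordingHook` and the toy loop are illustrative stand-ins, not MPA code.

```python
from contextlib import nullcontext


class RecordingHook:
    """Toy stand-in for FeatureVectorHook/SaliencyMapHook: records one entry per batch."""

    def __init__(self, module):
        self.module = module
        self.records = []

    def __enter__(self):
        return self

    def __exit__(self, *exc):
        return False


def run_inference(data_loader, dump_features=False):
    eval_predictions = []
    # The hook is active only when requested; otherwise nullcontext() yields None.
    with RecordingHook("backbone") if dump_features else nullcontext() as fhook:
        for data in data_loader:
            eval_predictions.append(f"pred-for-{data}")
            if fhook is not None:
                fhook.records.append(f"features-for-{data}")
    feature_vectors = fhook.records if dump_features else [None] * len(eval_predictions)
    return eval_predictions, feature_vectors


print(run_inference(["img0", "img1"], dump_features=True))
```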

mpa/cls/stage.py

Lines changed: 1 addition & 17 deletions
@@ -84,10 +84,6 @@ def configure(self, model_cfg, model_ckpt, data_cfg, training=True, **kwargs):
         if cfg.model.get('multilabel', False) or cfg.model.get('hierarchical', False):
             cfg.model.head.pop('topk', None)

-        # Other hyper-parameters
-        if cfg.get('hyperparams', False):
-            self.configure_hyperparams(cfg, training, **kwargs)
-
         return cfg

     @staticmethod
@@ -147,7 +143,7 @@ def configure_task(cfg, training, model_meta=None, **kwargs):

         model_tasks, dst_classes = None, None
         model_classes, data_classes = [], []
-        train_data_cfg = Stage.get_train_data_cfg(cfg)
+        train_data_cfg = Stage.get_data_cfg(cfg, "train")
         if isinstance(train_data_cfg, list):
             train_data_cfg = train_data_cfg[0]

@@ -268,18 +264,6 @@ def configure_task(cfg, training, model_meta=None, **kwargs):
             cfg.model.head.num_old_classes = len(old_classes)
         return model_tasks, dst_classes

-    @staticmethod
-    def configure_hyperparams(cfg, training, **kwargs):
-        hyperparams = kwargs.get('hyperparams', None)
-        if hyperparams is not None:
-            bs = hyperparams.get('bs', None)
-            if bs is not None:
-                cfg.data.samples_per_gpu = bs
-
-            lr = hyperparams.get('lr', None)
-            if lr is not None:
-                cfg.optimizer.lr = lr
-

 def refine_tasks(train_cfg, meta, adapt_type):
     new_tasks = train_cfg['tasks']
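For reference, the `configure_hyperparams` helper removed here (and from the detection and segmentation stages below) applied optional `bs`/`lr` overrides onto the recipe config. A minimal standalone sketch of that behaviour, using a plain dict in place of the mmcv config object used in MPA:

```python
def apply_hyperparams(cfg, hyperparams=None):
    """Mirror of the removed override: 'bs' -> data.samples_per_gpu, 'lr' -> optimizer.lr."""
    if hyperparams is not None:
        bs = hyperparams.get('bs')
        if bs is not None:
            cfg['data']['samples_per_gpu'] = bs
        lr = hyperparams.get('lr')
        if lr is not None:
            cfg['optimizer']['lr'] = lr
    return cfg


cfg = {'data': {'samples_per_gpu': 16}, 'optimizer': {'lr': 0.01}}
print(apply_hyperparams(cfg, {'bs': 32, 'lr': 0.001}))
# {'data': {'samples_per_gpu': 32}, 'optimizer': {'lr': 0.001}}
```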

mpa/cls/trainer.py

Lines changed: 1 addition & 1 deletion
@@ -150,7 +150,7 @@ def train_worker(gpu, dataset, cfg, distributed, validate, timestamp, meta):

     # prepare data loaders
     dataset = dataset if isinstance(dataset, (list, tuple)) else [dataset]
-    train_data_cfg = Stage.get_train_data_cfg(cfg)
+    train_data_cfg = Stage.get_data_cfg(cfg, "train")
     drop_last = train_data_cfg.drop_last if train_data_cfg.get('drop_last', False) else False

     # updated to adapt list of dataset for the 'train'

mpa/det/exporter.py

Lines changed: 2 additions & 1 deletion
@@ -34,6 +34,7 @@ def run(self, model_cfg, model_ckpt, data_cfg, **kwargs):

         output_path = os.path.join(cfg.work_dir, 'export')
         os.makedirs(output_path, exist_ok=True)
+
         model = build_detector(cfg.model)
         if model_ckpt:
             load_checkpoint(model=model, filename=model_ckpt, map_location='cpu')
@@ -47,7 +48,7 @@ def run(self, model_cfg, model_ckpt, data_cfg, **kwargs):
             model = model.cpu()
             precision = kwargs.pop('precision', 'FP32')
             logger.info(f'Model will be exported with precision {precision}')
-
+
             export_model(model, cfg, output_path, target='openvino', precision=precision)
         except Exception as ex:
             # output_model.model_status = ModelStatus.FAILED

mpa/det/inferrer.py

Lines changed: 22 additions & 10 deletions
@@ -7,15 +7,17 @@
 from mmcv.parallel import MMDataParallel, is_module_wrapper
 from mmcv.runner import load_checkpoint

-from mmdet.datasets import build_dataloader, build_dataset, replace_ImageToTensor
+from mmdet.datasets import build_dataloader, build_dataset, replace_ImageToTensor, ImageTilingDataset
 from mmdet.models import build_detector
 from mmdet.parallel import MMDataCPU
 from mmdet.utils.deployment import get_saliency_map, get_feature_vector
+from mmdet.apis import single_gpu_test

 from mpa.registry import STAGES
 from .stage import DetectionStage
 from mpa.utils.logger import get_logger

+
 logger = get_logger()

@@ -33,9 +35,9 @@ def run(self, model_cfg, model_ckpt, data_cfg, **kwargs):
         """
         self._init_logger()
         mode = kwargs.get('mode', 'train')
-        eval = kwargs.get('eval', False)
-        dump_features = kwargs.get('dump_features', False)
-        dump_saliency_map = kwargs.get('dump_saliency_map', False)
+        eval = kwargs.pop('eval', False)
+        dump_features = kwargs.pop('dump_features', False)
+        dump_saliency_map = kwargs.pop('dump_saliency_map', False)
         if mode not in self.mode:
             return {}

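Switching these flags from `kwargs.get` to `kwargs.pop` means they are consumed here rather than forwarded along with the remaining keyword arguments. A tiny illustration of the difference (hypothetical names, not MPA code):

```python
def run(**kwargs):
    # pop() removes the flag from kwargs; get() would leave it in place.
    dump_features = kwargs.pop('dump_features', False)
    mode = kwargs.get('mode', 'train')
    return dump_features, mode, kwargs


print(run(mode='train', dump_features=True, other='x'))
# (True, 'train', {'mode': 'train', 'other': 'x'})  -> 'dump_features' is gone, 'mode' remains
```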
@@ -82,7 +84,7 @@ def infer(self, cfg, eval=False, dump_features=False, dump_saliency_map=False):
         input_source = cfg.get('input_source')
         logger.info(f'Inferring on input source: data.{input_source}')
         if input_source == 'train':
-            src_data_cfg = self.get_train_data_cfg(cfg)
+            src_data_cfg = self.get_data_cfg(cfg, input_source)
         else:
             src_data_cfg = cfg.data[input_source]
         data_cfg.test_mode = src_data_cfg.get('test_mode', False)
@@ -180,10 +182,7 @@ def dummy_dump_saliency_hook(model, input, out):
             model = model.module
         with eval_model.module.backbone.register_forward_hook(feature_vector_hook):
             with eval_model.module.backbone.register_forward_hook(saliency_map_hook):
-                for data in data_loader:
-                    with torch.no_grad():
-                        result = eval_model(return_loss=False, rescale=True, **data)
-                    eval_predictions.extend(result)
+                eval_predictions = single_gpu_test(eval_model, data_loader)

         for key in [
             'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best',
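The hand-written loop is replaced by mmdet's `single_gpu_test`, which runs the same kind of no-grad forward pass over the loader while also reporting progress. A rough sketch of the loop it replaces is below; this mirrors the removed code, not mmdet's actual implementation.

```python
import torch


def simple_gpu_test(model, data_loader):
    # Roughly what the removed loop did: evaluate every batch without
    # gradients and collect the per-image results.
    model.eval()
    results = []
    for data in data_loader:
        with torch.no_grad():
            result = model(return_loss=False, rescale=True, **data)
        results.extend(result)
    return results
```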
@@ -193,7 +192,20 @@ def dummy_dump_saliency_hook(model, input, out):

         metric = None
         if eval:
-            metric = dataset.evaluate(eval_predictions, **cfg.evaluation)[cfg.evaluation.metric]
+            metric = dataset.evaluate(eval_predictions, **cfg.evaluation)
+            metric = metric['mAP'] if isinstance(cfg.evaluation.metric, list) else metric[cfg.evaluation.metric]
+
+        # Check and unwrap ImageTilingDataset object from TaskAdaptEvalDataset
+        while hasattr(dataset, 'dataset') and not isinstance(dataset, ImageTilingDataset):
+            dataset = dataset.dataset
+
+        if isinstance(dataset, ImageTilingDataset):
+            saliency_maps = [saliency_maps[i] for i in range(dataset.num_samples)]
+            feature_vectors = [feature_vectors[i] for i in range(dataset.num_samples)]
+            if not dataset.merged_results:
+                eval_predictions = dataset.merge(eval_predictions)
+            else:
+                eval_predictions = dataset.merged_results

         outputs = dict(
             classes=target_classes,
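The unwrapping loop above walks through nested dataset wrappers (such as a task-adapt eval wrapper) until it reaches the tiling dataset, then merges per-tile predictions back into full-image predictions. A minimal mock of that control flow follows; the classes below are illustrative stand-ins, not the mmdet ImageTilingDataset API.

```python
class TileDataset:
    """Stand-in for ImageTilingDataset: knows how to merge per-tile predictions."""

    def __init__(self, num_samples):
        self.num_samples = num_samples  # number of full images
        self.merged_results = None

    def merge(self, tile_predictions):
        # Toy merge: group tile predictions per original image index.
        self.merged_results = [tile_predictions[i::self.num_samples] for i in range(self.num_samples)]
        return self.merged_results


class Wrapper:
    """Stand-in for an outer wrapper such as TaskAdaptEvalDataset."""

    def __init__(self, dataset):
        self.dataset = dataset


dataset = Wrapper(TileDataset(num_samples=2))
predictions = ['t0', 't1', 't2', 't3']  # per-tile predictions

# Unwrap until the tiling dataset is reached, mirroring the while-loop in the diff.
while hasattr(dataset, 'dataset') and not isinstance(dataset, TileDataset):
    dataset = dataset.dataset

if isinstance(dataset, TileDataset):
    predictions = dataset.merge(predictions) if not dataset.merged_results else dataset.merged_results

print(predictions)  # [['t0', 't2'], ['t1', 't3']]
```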

mpa/det/stage.py

Lines changed: 13 additions & 25 deletions
@@ -62,10 +62,6 @@ def configure(self, model_cfg, model_ckpt, data_cfg, training=True, **kwargs):
         if training:
             self.configure_regularization(cfg)

-        # Other hyper-parameters
-        if 'hyperparams' in cfg:
-            self.configure_hyperparams(cfg, training, **kwargs)
-
         # Hooks
         self.configure_hook(cfg)

@@ -117,12 +113,15 @@ def configure_data(self, cfg, training, **kwargs):
                     seed=cfg.seed
                 )
             )
-        if 'dataset' in cfg.data.train:
-            train_cfg = self.get_train_data_cfg(cfg)
-            train_cfg.ote_dataset = cfg.data.train.pop('ote_dataset', None)
-            train_cfg.labels = cfg.data.train.get('labels', None)
-            train_cfg.data_classes = cfg.data.train.pop('data_classes', None)
-            train_cfg.new_classes = cfg.data.train.pop('new_classes', None)
+        for subset in ("train", "val", "test"):
+            if 'dataset' in cfg.data[subset]:
+                subset_cfg = self.get_data_cfg(cfg, subset)
+                subset_cfg.ote_dataset = cfg.data[subset].pop('ote_dataset', None)
+                subset_cfg.labels = cfg.data[subset].get('labels', None)
+                if 'data_classes' in cfg.data[subset]:
+                    subset_cfg.data_classes = cfg.data[subset].pop('data_classes')
+                if 'new_classes' in cfg.data[subset]:
+                    subset_cfg.new_classes = cfg.data[subset].pop('new_classes')

     def configure_task(self, cfg, training, **kwargs):
         """Adjust settings for task adaptation
@@ -200,7 +199,7 @@ def configure_task_classes(self, cfg, task_adapt_type, task_adapt_op):

     def configure_task_data_pipeline(self, cfg, model_classes, data_classes):
         # Trying to alter class indices of training data according to model class order
-        tr_data_cfg = self.get_train_data_cfg(cfg)
+        tr_data_cfg = self.get_data_cfg(cfg, "train")
         class_adapt_cfg = dict(type='AdaptClassLabels', src_classes=data_classes, dst_classes=model_classes)
         pipeline_cfg = tr_data_cfg.pipeline
         for i, op in enumerate(pipeline_cfg):
@@ -240,7 +239,7 @@ def configure_task_cls_incr(self, cfg, task_adapt_type, org_model_classes, model
         else:
             bbox_head = cfg.model.roi_head.bbox_head
         if task_adapt_type == 'mpa':
-            tr_data_cfg = self.get_train_data_cfg(cfg)
+            tr_data_cfg = self.get_data_cfg(cfg, "train")
             if tr_data_cfg.type != 'MPADetDataset':
                 tr_data_cfg.img_ids_dict = self.get_img_ids_for_incr(cfg, org_model_classes, model_classes)
                 tr_data_cfg.org_type = tr_data_cfg.type
@@ -311,7 +310,7 @@ def configure_task_cls_incr(self, cfg, task_adapt_type, org_model_classes, model
                 ConfigDict(type='AdaptiveTrainSchedulingHook', **adaptive_validation_interval)
             )
         else:
-            src_data_cfg = Stage.get_train_data_cfg(cfg)
+            src_data_cfg = Stage.get_data_cfg(cfg, "train")
             src_data_cfg.pop('old_new_indices', None)

     def configure_regularization(self, cfg):
@@ -338,7 +337,7 @@ def get_img_ids_for_incr(cfg, org_model_classes, model_classes):
         new_classes = np.setdiff1d(model_classes, org_model_classes).tolist()
         old_classes = np.intersect1d(org_model_classes, model_classes).tolist()

-        src_data_cfg = Stage.get_train_data_cfg(cfg)
+        src_data_cfg = Stage.get_data_cfg(cfg, "train")

         ids_old, ids_new = [], []
         data_cfg = cfg.data.test.copy()
@@ -366,17 +365,6 @@ def get_img_ids_for_incr(cfg, org_model_classes, model_classes):
         )
         return outputs

-    def configure_hyperparams(self, cfg, training, **kwargs):
-        hyperparams = kwargs.get('hyperparams', None)
-        if hyperparams is not None:
-            bs = hyperparams.get('bs', None)
-            if bs is not None:
-                cfg.data.samples_per_gpu = bs
-
-            lr = hyperparams.get('lr', None)
-            if lr is not None:
-                cfg.optimizer.lr = lr
-
     @staticmethod
     def add_yolox_hooks(cfg):
         update_or_add_custom_hook(

mpa/det/trainer.py

Lines changed: 1 addition & 1 deletion
@@ -155,7 +155,7 @@ def train_worker(gpu, target_classes, datasets, cfg, distributed=False,
     # Do clustering for SSD model
     if hasattr(cfg.model, 'bbox_head') and hasattr(cfg.model.bbox_head, 'anchor_generator'):
         if getattr(cfg.model.bbox_head.anchor_generator, 'reclustering_anchors', False):
-            train_cfg = Stage.get_train_data_cfg(cfg)
+            train_cfg = Stage.get_data_cfg(cfg, "train")
             train_dataset = train_cfg.get('ote_dataset', None)
             cfg, model = cluster_anchors(cfg, train_dataset, model)
     train_detector(

mpa/seg/inferrer.py

Lines changed: 1 addition & 1 deletion
@@ -62,7 +62,7 @@ def infer(self, cfg, dump_features=False):
         input_source = cfg.get('input_source', 'test')
         self.logger.info(f'Inferring on input source: data.{input_source}')
         if input_source == 'train':
-            src_data_cfg = Stage.get_train_data_cfg(cfg)
+            src_data_cfg = Stage.get_data_cfg(cfg, "train")
         else:
             src_data_cfg = cfg.data[input_source]
         data_cfg = cfg.data.test.copy()

mpa/seg/stage.py

Lines changed: 2 additions & 17 deletions
@@ -62,10 +62,6 @@ def configure(self, model_cfg, model_ckpt, data_cfg, training=True, **kwargs):
         if 'task_adapt' in cfg:
             self.configure_task(cfg, training, **kwargs)

-        # Other hyper-parameters
-        if 'hyperparams' in cfg:
-            self.configure_hyperparams(cfg, training, **kwargs)
-
         return cfg

     def configure_model(self, cfg, training, **kwargs):
@@ -154,7 +150,7 @@ def configure_task_cls_incr(self, cfg, task_adapt_type, org_model_classes, model
                 head.loss_decode = [self.configure_am_softmax_loss_with_ignore(model_classes)]

         # Dataset
-        src_data_cfg = Stage.get_train_data_cfg(cfg)
+        src_data_cfg = Stage.get_data_cfg(cfg, "train")
         for mode in ['train', 'val', 'test']:
             if src_data_cfg.type == 'MPASegIncrDataset':
                 if cfg.data[mode]['type'] != 'MPASegIncrDataset':
@@ -200,7 +196,7 @@ def configure_task_classes(self, cfg, task_adapt_op):

         # Model classes
         if task_adapt_op == 'REPLACE':
-            if len(data_classes) == 1: # 'background'
+            if len(data_classes) == 1:  # 'background'
                 model_classes = org_model_classes.copy()
             else:
                 model_classes = data_classes.copy()
@@ -225,14 +221,3 @@ def configure_task_classes(self, cfg, task_adapt_op):
             head.num_classes = len(model_classes)

         return org_model_classes, model_classes, data_classes
-
-    def configure_hyperparams(self, cfg, training, **kwargs):
-        hyperparams = kwargs.get('hyperparams', None)
-        if hyperparams is not None:
-            bs = hyperparams.get('bs', None)
-            if bs is not None:
-                cfg.data.samples_per_gpu = bs
-
-            lr = hyperparams.get('lr', None)
-            if lr is not None:
-                cfg.optimizer.lr = lr

mpa/stage.py

Lines changed: 6 additions & 5 deletions
@@ -260,19 +260,20 @@ def get_model_meta(cfg):
         return meta

     @staticmethod
-    def get_train_data_cfg(cfg):
-        if 'dataset' in cfg.data.train:  # Concat|RepeatDataset
-            dataset = cfg.data.train.dataset
+    def get_data_cfg(cfg, subset):
+        assert subset in ["train", "val", "test"], f"Unknown subset:{subset}"
+        if 'dataset' in cfg.data[subset]:  # Concat|RepeatDataset
+            dataset = cfg.data[subset].dataset
             while hasattr(dataset, 'dataset'):
                 dataset = dataset.dataset
             return dataset
         else:
-            return cfg.data.train
+            return cfg.data[subset]

     @staticmethod
     def get_data_classes(cfg):
         data_classes = []
-        train_cfg = Stage.get_train_data_cfg(cfg)
+        train_cfg = Stage.get_data_cfg(cfg, "train")
         if 'data_classes' in train_cfg:
             data_classes = list(train_cfg.pop('data_classes', []))
         elif 'classes' in train_cfg:
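The renamed `get_data_cfg` generalizes the old train-only helper: it takes a subset name and unwraps nested wrapper configs (Repeat/Concat-style, where the real dataset sits under a `dataset` key) before returning the innermost config. A standalone sketch of the same logic is below; the `AttrDict` class is an illustrative stand-in for mmcv's config object, not MPA code.

```python
class AttrDict(dict):
    """Minimal dict with attribute access, standing in for mmcv's ConfigDict."""

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)


def get_data_cfg(cfg, subset):
    assert subset in ["train", "val", "test"], f"Unknown subset:{subset}"
    if 'dataset' in cfg.data[subset]:  # Concat|RepeatDataset wrapper
        dataset = cfg.data[subset].dataset
        while hasattr(dataset, 'dataset'):
            dataset = dataset.dataset
        return dataset
    return cfg.data[subset]


cfg = AttrDict(data=AttrDict(
    train=AttrDict(dataset=AttrDict(dataset=AttrDict(type='CocoDataset'))),  # doubly wrapped
    val=AttrDict(type='CocoDataset'),
    test=AttrDict(type='CocoDataset'),
))
print(get_data_cfg(cfg, "train"))  # {'type': 'CocoDataset'}
print(get_data_cfg(cfg, "val"))    # {'type': 'CocoDataset'}
```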
