Args:
            loop (BaseLoop or dict): A validation loop or a dict to build a
                validation loop. If ``loop`` is a validation loop object, just
                returns itself.
Returns:
            :obj:`BaseLoop`: Validation loop object built from ``loop``.
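
        Example (a minimal sketch; ``CustomValLoop`` is a hypothetical loop
        registered in ``LOOPS``)::

            # build the default `ValLoop` from an empty config
            val_loop = runner.build_val_loop(dict())
            # build a custom loop through the registry
            val_loop = runner.build_val_loop(dict(type='CustomValLoop'))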
"""
if isinstance(loop, BaseLoop):
return loop
elif not isinstance(loop, dict):
raise TypeError(
                f'val_loop should be a Loop object or dict, but got {loop}')
loop_cfg = copy.deepcopy(loop)
if 'type' in loop_cfg:
loop = LOOPS.build(
loop_cfg,
default_args=dict(
runner=self,
dataloader=self.val_dataloader,
evaluator=self.val_evaluator))
else:
loop = ValLoop(
runner=self,
dataloader=self.val_dataloader,
evaluator=self.val_evaluator, # type: ignore
**loop_cfg,
) # type: ignore
return loop # type: ignore
def build_test_loop(self, loop: Union[BaseLoop, Dict]) -> BaseLoop:
"""Build test loop.

        Examples of ``loop``::

            # `TestLoop` will be used
            loop = dict()

            # custom test loop
            loop = dict(type='CustomTestLoop')

        Args:
            loop (BaseLoop or dict): A test loop or a dict to build a test
                loop. If ``loop`` is a test loop object, just returns itself.

        Returns:
            :obj:`BaseLoop`: Test loop object built from ``loop``.
        """
if isinstance(loop, BaseLoop):
return loop
elif not isinstance(loop, dict):
raise TypeError(
                f'test_loop should be a Loop object or dict, but got {loop}')
loop_cfg = copy.deepcopy(loop) # type: ignore
if 'type' in loop_cfg:
loop = LOOPS.build(
loop_cfg,
default_args=dict(
runner=self,
dataloader=self.test_dataloader,
evaluator=self.test_evaluator))
else:
loop = TestLoop(
runner=self,
dataloader=self.test_dataloader,
evaluator=self.test_evaluator) # type: ignore
return loop # type: ignore
def load_or_resume(self) -> None:
"""load or resume checkpoint."""
if self._has_loaded:
return None
# decide to load from checkpoint or resume from checkpoint
resume_from = None
if self._resume and self._load_from is None:
# auto resume from the latest checkpoint
resume_from = find_latest_checkpoint(self.work_dir)
self.logger.info(
f'Auto resumed from the latest checkpoint {resume_from}.')
elif self._resume and self._load_from is not None:
# resume from the specified checkpoint
resume_from = self._load_from
if resume_from is not None:
self.resume(resume_from)
self._has_loaded = True
elif self._load_from is not None:
self.load_checkpoint(self._load_from)
self._has_loaded = True
def train(self) -> None:
"""Launch training."""
if self.train_loop is None:
raise RuntimeError(
'`self.train_loop` should not be None when calling train '
'method. Please provide `train_dataloader`, `train_cfg`, '
'`optimizer` and `param_scheduler` arguments when '
'initializing runner.')
self.train_loop = self.build_train_loop(
self.train_loop) # type: ignore
if self.val_loop is not None:
self.val_loop = self.build_val_loop(self.val_loop) # type: ignore
self.load_or_resume()
# TODO: add a contextmanager to avoid calling `before_run` many times
self.call_hook('before_run')
self.train_loop.run() # type: ignore
self.call_hook('after_run')
def val(self) -> None:
"""Launch validation."""
if self.val_loop is None:
raise RuntimeError(
                '`self.val_loop` should not be None when calling val '
                'method. Please provide `val_dataloader`, `val_cfg` and '
                '`val_evaluator` arguments when initializing runner.')
self.val_loop = self.build_val_loop(self.val_loop) # type: ignore
self.load_or_resume()
self.call_hook('before_run')
self.val_loop.run() # type: ignore
self.call_hook('after_run')
def test(self) -> None:
"""Launch test."""
if self.test_loop is None:
raise RuntimeError(
                '`self.test_loop` should not be None when calling test '
                'method. Please provide `test_dataloader`, `test_cfg` and '
                '`test_evaluator` arguments when initializing runner.')
self.test_loop = self.build_test_loop(self.test_loop) # type: ignore
self.load_or_resume()
self.call_hook('before_run')
self.test_loop.run() # type: ignore
self.call_hook('after_run')
def call_hook(self, fn_name: str, **kwargs) -> None:
"""Call all hooks.
Args:
fn_name (str): The function name in each hook to be called, such as
"before_train_epoch".
**kwargs: Keyword arguments passed to hook.
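
        Example (a sketch; hooks that do not define the named method are
        silently skipped)::

            runner.call_hook('before_train_epoch')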
"""
for hook in self._hooks:
# support adding additional custom hook methods
if hasattr(hook, fn_name):
getattr(hook, fn_name)(self, **kwargs)
def register_hook(
self,
hook: Union[Hook, Dict],
priority: Optional[Union[str, int, Priority]] = None) -> None:
"""Register a hook into the hook list.
The hook will be inserted into a priority queue, with the specified
priority (See :class:`Priority` for details of priorities).
For hooks with the same priority, they will be triggered in the same
order as they are registered.

        The priority of the hook will be decided by the following rules:

        - The ``priority`` argument. If ``priority`` is given, it will be the
          priority of the hook.
        - If ``hook`` is a dict and contains ``priority``, the priority will
          be the value of ``hook['priority']``.
        - If ``hook`` is a dict without ``priority``, or an instance of
          ``Hook``, the priority will be ``hook.priority``.

Args:
hook (:obj:`Hook` or dict): The hook to be registered.
priority (int or str or :obj:`Priority`, optional): Hook priority.
Lower value means higher priority.
"""
if not isinstance(hook, (Hook, dict)):
raise TypeError(
f'hook should be an instance of Hook or dict, but got {hook}')
_priority = None
if isinstance(hook, dict):
if 'priority' in hook:
_priority = hook.pop('priority')
hook_obj = HOOKS.build(hook)
else:
hook_obj = hook
if priority is not None:
hook_obj.priority = priority
elif _priority is not None:
hook_obj.priority = _priority
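        # Insert the hook so `self._hooks` stays sorted by ascending priority
        # value; scanning backwards keeps registration order stable for hooks
        # of equal priority.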
inserted = False
for i in range(len(self._hooks) - 1, -1, -1):
if get_priority(hook_obj.priority) >= get_priority(
self._hooks[i].priority):
self._hooks.insert(i + 1, hook_obj)
inserted = True
break
if not inserted:
self._hooks.insert(0, hook_obj)
def register_default_hooks(
self,
hooks: Optional[Dict[str, Union[Hook, Dict]]] = None) -> None:
"""Register default hooks into hook list.
``hooks`` will be registered into runner to execute some default
actions like updating model parameters or saving checkpoints.
Default hooks and their priorities:
+----------------------+-------------------------+
| Hooks | Priority |
+======================+=========================+
| OptimizerHook | HIGH (30) |
+----------------------+-------------------------+
| IterTimerHook | NORMAL (40) |
+----------------------+-------------------------+
| DistSamplerSeedHook | NORMAL (40) |
+----------------------+-------------------------+
| LoggerHook | BELOW_NORMAL (60) |
+----------------------+-------------------------+
| ParamSchedulerHook | LOW (70) |
+----------------------+-------------------------+
| CheckpointHook | VERY_LOW (90) |
+----------------------+-------------------------+

        If ``hooks`` is None, the above hooks will be registered by
        default::

default_hooks = dict(
optimizer=dict(type='OptimizerHook', grad_clip=None),
timer=dict(type='IterTimerHook'),
sampler_seed=dict(type='DistSamplerSeedHook'),
logger=dict(type='LoggerHook'),
param_scheduler=dict(type='ParamSchedulerHook'),
checkpoint=dict(type='CheckpointHook', interval=1),
)

        If ``hooks`` is not None, it will be merged into ``default_hooks``.
        If an item in ``hooks`` has a None value, the corresponding key will
        be popped from ``default_hooks``::

            hooks = dict(timer=None)

        The final registered default hooks will be :obj:`OptimizerHook`,
        :obj:`DistSamplerSeedHook`, :obj:`LoggerHook`,
        :obj:`ParamSchedulerHook` and :obj:`CheckpointHook`.

Args:
hooks (dict[str, Hook or dict], optional): Default hooks or configs
to be registered.
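
        Example (a sketch overriding one default)::

            # save checkpoints every 5 epochs instead of every epoch
            runner.register_default_hooks(
                dict(checkpoint=dict(type='CheckpointHook', interval=5)))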
"""
default_hooks: dict = dict(
optimizer=dict(type='OptimizerHook', grad_clip=None),
timer=dict(type='IterTimerHook'),
logger=dict(type='LoggerHook'),
param_scheduler=dict(type='ParamSchedulerHook'),
checkpoint=dict(type='CheckpointHook', interval=1),
sampler_seed=dict(type='DistSamplerSeedHook'),
)
if hooks is not None:
for name, hook in hooks.items():
if name in default_hooks and hook is None:
                    # remove the hook from default_hooks
default_hooks.pop(name)
else:
assert hook is not None
default_hooks[name] = hook
for hook in default_hooks.values():
self.register_hook(hook)
def register_custom_hooks(self, hooks: List[Union[Hook, Dict]]) -> None:
"""Register custom hooks into hook list.
Args:
hooks (list[Hook | dict]): List of hooks or configs to be
registered.
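
        Example (a sketch; ``MyVisualizationHook`` is hypothetical)::

            runner.register_custom_hooks([dict(type='MyVisualizationHook')])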
"""
for hook in hooks:
self.register_hook(hook)
def register_hooks(
self,
default_hooks: Optional[Dict[str, Union[Hook, Dict]]] = None,
custom_hooks: Optional[List[Union[Hook, Dict]]] = None) -> None:
"""Register default hooks and custom hooks into hook list.
Args:
default_hooks (dict[str, dict] or dict[str, Hook], optional): Hooks
to execute default actions like updating model parameters and
saving checkpoints. Defaults to None.
custom_hooks (list[dict] or list[Hook], optional): Hooks to execute
custom actions like visualizing images processed by pipeline.
Defaults to None.
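
        Example (a sketch; ``MyHook`` is hypothetical)::

            runner.register_hooks(
                default_hooks=dict(timer=None),  # drop the `IterTimerHook`
                custom_hooks=[dict(type='MyHook')])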
"""
self.register_default_hooks(default_hooks)
if custom_hooks is not None:
self.register_custom_hooks(custom_hooks)
def resume(self,
filename: str,
resume_optimizer: bool = True,
resume_param_scheduler: bool = True,
map_location: Union[str, Callable] = 'default') -> None:
"""Resume model from checkpoint.
Args:
filename (str): Accept local filepath, URL, ``torchvision://xxx``,
``open-mmlab://xxx``.
resume_optimizer (bool): Whether to resume optimizer state.
Defaults to True.
resume_param_scheduler (bool): Whether to resume param scheduler
state. Defaults to True.
            map_location (str or callable): A string or a callable function
                specifying how to remap storage locations.
                Defaults to 'default'.
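
        Example (a sketch; the checkpoint path is hypothetical)::

            runner.resume('work_dirs/exp/epoch_10.pth')
            # or remap storages explicitly
            runner.resume('work_dirs/exp/epoch_10.pth', map_location='cpu')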
"""
if map_location == 'default':
if torch.cuda.is_available():
device_id = torch.cuda.current_device()
checkpoint = self.load_checkpoint(
filename,
map_location=lambda storage, loc: storage.cuda(device_id))
else:
checkpoint = self.load_checkpoint(filename)
else:
checkpoint = self.load_checkpoint(
filename, map_location=map_location)
self._epoch = checkpoint['meta']['epoch']
self._iter = checkpoint['meta']['iter']
if self.meta is None:
self.meta = {}
self.meta.setdefault('hook_msgs', {})
# load `last_ckpt`, `best_score`, `best_ckpt`, etc. for hook messages
self.meta['hook_msgs'].update(checkpoint['meta'].get('hook_msgs', {}))
# check whether the number of GPU used for current experiment
# is consistent with resuming from checkpoint
if 'config' in checkpoint['meta']:
config = mmengine.Config.fromstring(
checkpoint['meta']['config'], file_format='.py')
previous_gpu_ids = config.get('gpu_ids', None)
if (previous_gpu_ids is not None and len(previous_gpu_ids) > 0
and len(previous_gpu_ids) != self._world_size):
# TODO, should we modify the iteration?
self.logger.info(
'Number of GPU used for current experiment is not '
'consistent with resuming from checkpoint')
        # resume meta information
self.meta = checkpoint['meta']
# resume optimizer
if 'optimizer' in checkpoint and resume_optimizer:
self.optimizer = self.build_optimizer(self.optimizer)
self.optimizer.load_state_dict(checkpoint['optimizer'])
# resume param scheduler
if 'param_schedulers' in checkpoint and resume_param_scheduler:
self.param_schedulers = self.build_param_scheduler( # type: ignore
self.param_schedulers)
for cur_scheduler, ckpt_scheduler in zip(
self.param_schedulers, checkpoint['param_schedulers']):
cur_scheduler.load_state_dict(ckpt_scheduler) # type: ignore
self._has_loaded = True
self.logger.info(f'resumed epoch: {self._epoch}, iter: {self._iter}')
def load_checkpoint(self,
filename: str,
map_location: Union[str, Callable] = 'cpu',
strict: bool = False,
revise_keys: list = [(r'^module.', '')]):
"""Load checkpoint from given ``filename``.
Args:
filename (str): Accept local filepath, URL, ``torchvision://xxx``,
``open-mmlab://xxx``.
            map_location (str or callable): A string or a callable function
                specifying how to remap storage locations.
                Defaults to 'cpu'.
            strict (bool): Whether to allow different params for the model
                and checkpoint. Defaults to False.
revise_keys (list): A list of customized keywords to modify the
state_dict in checkpoint. Each item is a (pattern, replacement)
pair of the regular expression operations. Default: strip
the prefix 'module.' by [(r'^module\\.', '')].
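
        Example (a sketch; the filepath is hypothetical)::

            runner.load_checkpoint('checkpoints/latest.pth',
                                   map_location='cpu')
            # strip a hypothetical `backbone.` prefix instead of `module.`
            runner.load_checkpoint('checkpoints/latest.pth',
                                   revise_keys=[(r'^backbone\\.', '')])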
"""
checkpoint = _load_checkpoint(filename, map_location=map_location)
        # let hooks inspect or modify the raw checkpoint via `after_load_ckpt`
self.call_hook('after_load_ckpt', checkpoint=checkpoint)
if is_model_wrapper(self.model):
model = self.model.module
else:
model = self.model
checkpoint = _load_checkpoint_to_model(
model, checkpoint, strict, revise_keys=revise_keys)
self._has_loaded = True
self.logger.info(f'Load checkpoint from {filename}')
return checkpoint
@master_only
def save_checkpoint(self,
out_dir: str,
filename: str,
save_optimizer: bool = True,
save_param_scheduler: bool = True,
                        meta: Optional[dict] = None,
create_symlink: bool = True,
by_epoch: bool = True):
"""Save checkpoints.
``CheckpointHook`` invokes this method to save checkpoints
periodically.
Args:
out_dir (str): The directory that checkpoints are saved.
filename (str): The checkpoint filename.
save_optimizer (bool): Whether to save the optimizer to
the checkpoint. Defaults to True.
save_param_scheduler (bool): Whether to save the param_scheduler
to the checkpoint. Defaults to True.
meta (dict, optional): The meta information to be saved in the
checkpoint. Defaults to None.
            create_symlink (bool): Whether to create a symlink
                "latest.pth" to point to the latest checkpoint.
                Defaults to True.
            by_epoch (bool): Whether the checkpoint is saved at the end of an
                epoch (bumps ``epoch`` in meta) or of an iteration (bumps
                ``iter``). Defaults to True.
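
        Example (a sketch)::

            runner.save_checkpoint(runner.work_dir, 'epoch_1.pth')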
"""
if meta is None:
meta = {}
elif not isinstance(meta, dict):
raise TypeError(
f'meta should be a dict or None, but got {type(meta)}')
if self.meta is not None:
meta.update(self.meta)
if by_epoch:
            # `self._epoch` increments 1 after
            # `self.call_hook('after_train_epoch')`, but `save_checkpoint` is
            # called by the `after_train_epoch` method of `CheckpointHook`, so
            # `epoch` here should be `self._epoch + 1`
meta.update(epoch=self._epoch + 1, iter=self._iter)
else:
meta.update(epoch=self._epoch, iter=self._iter + 1)
filepath = osp.join(out_dir, filename)
if hasattr(self.model, 'CLASSES') and self.model.CLASSES is not None:
# save class name to the meta
meta.update(CLASSES=self.model.CLASSES)
if is_model_wrapper(self.model):
model = self.model.module
else:
model = self.model
checkpoint = {
'meta': meta,
'state_dict': weights_to_cpu(get_state_dict(model))
}
# save optimizer state dict to checkpoint
if save_optimizer:
if isinstance(self.optimizer, Optimizer):
checkpoint['optimizer'] = self.optimizer.state_dict()
else: # TODO
raise TypeError(
'self.optimizer should be an optimizer, but got '
f'{self.optimizer}')
# save param scheduler state dict
if save_param_scheduler:
checkpoint['param_schedulers'] = []
for _scheduler in self.param_schedulers:
state_dict = _scheduler.state_dict() # type: ignore
checkpoint['param_schedulers'].append(state_dict)
self.call_hook('before_save_ckpt', checkpoint=checkpoint)
save_checkpoint(checkpoint, filepath)
# in some environments, `os.symlink` is not supported, you may need to
# set `create_symlink` to False
if create_symlink:
dst_file = osp.join(out_dir, 'latest.pth')
if platform.system() != 'Windows':
symlink(filename, dst_file)
else:
shutil.copy(filepath, dst_file)
@master_only
def dump_config(self) -> None:
"""Dump config to `work_dir`."""
if isinstance(self.cfg,
Config) and self.cfg.get('filename') is not None:
self.cfg.dump(
osp.join(self.work_dir, osp.basename(self.cfg.filename)))
elif self.cfg:
# TODO
pass