        self.assertEqual(runner.optim_wrapper['linear1'].param_groups[0]['lr'],
                         0.0001)
        self.assertIsInstance(runner.optim_wrapper['linear2'].optimizer, Adam)
        self.assertEqual(runner.optim_wrapper['linear2'].param_groups[0]['lr'],
                         0.0002)
        self.assertIsInstance(runner.param_schedulers, dict)
        self.assertEqual(len(runner.param_schedulers['linear1']), 1)
        self.assertIsInstance(runner.param_schedulers['linear1'][0],
                              MultiStepLR)
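        # Note: `MultiStepLR.milestones` is stored as a Counter mapping each
        # milestone to its multiplicity, hence the dict comparison below.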
        self.assertEqual(runner.param_schedulers['linear1'][0].milestones, {
            1: 1,
            2: 1
        })
        self.assertEqual(len(runner.param_schedulers['linear2']), 1)
        self.assertIsInstance(runner.param_schedulers['linear2'][0],
                              MultiStepLR)
        self.assertEqual(runner.param_schedulers['linear2'][0].milestones, {
            1: 1,
            2: 1
        })

        # 2. test iter based
        cfg = copy.deepcopy(self.iter_based_cfg)
        cfg.experiment_name = 'test_checkpoint8'
        runner = Runner.from_cfg(cfg)
        runner.train()
        # 2.1 test `save_checkpoint` which is called by `CheckpointHook`
        path = osp.join(self.temp_dir, 'iter_12.pth')
        self.assertTrue(osp.exists(path))
        self.assertFalse(osp.exists(osp.join(self.temp_dir, 'epoch_13.pth')))

        ckpt = torch.load(path)
        self.assertEqual(ckpt['meta']['epoch'], 0)
        self.assertEqual(ckpt['meta']['iter'], 12)
        assert isinstance(ckpt['optimizer'], dict)
        assert isinstance(ckpt['param_schedulers'], list)
        self.assertIsInstance(ckpt['message_hub'], dict)
        message_hub.load_state_dict(ckpt['message_hub'])
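        # The checkpoint meta counts completed iterations (12), while the
        # message hub stores the 0-based index of the last logged iteration,
        # hence 11 below.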
        self.assertEqual(message_hub.get_info('epoch'), 0)
        self.assertEqual(message_hub.get_info('iter'), 11)

        # 2.2 test `load_checkpoint`
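        # `load_checkpoint` only loads the weights; it does not restore the
        # training progress, so `epoch` and `iter` should stay at 0.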
        cfg = copy.deepcopy(self.iter_based_cfg)
        cfg.experiment_name = 'test_checkpoint9'
        runner = Runner.from_cfg(cfg)
        runner.load_checkpoint(path)
        self.assertEqual(runner.epoch, 0)
        self.assertEqual(runner.iter, 0)
        self.assertTrue(runner._has_loaded)

        # 2.3 test `resume`
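        # Unlike `load_checkpoint`, `resume` also restores the training state:
        # epoch/iter counters, optimizer, parameter schedulers and message hub.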
        cfg = copy.deepcopy(self.iter_based_cfg)
        cfg.experiment_name = 'test_checkpoint10'
        runner = Runner.from_cfg(cfg)
        runner.resume(path)
        self.assertEqual(runner.epoch, 0)
        self.assertEqual(runner.iter, 12)
        self.assertTrue(runner._has_loaded)
        self.assertIsInstance(runner.optim_wrapper.optimizer, SGD)
        self.assertIsInstance(runner.param_schedulers[0], MultiStepLR)
        self.assertEqual(runner.message_hub.get_info('epoch'), 0)
        self.assertEqual(runner.message_hub.get_info('iter'), 11)

        # 2.4 test auto resume
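        # With `cfg.resume = True` and no `load_from`, `load_or_resume` is
        # expected to pick up the latest checkpoint in the work directory
        # (iter_12.pth here) automatically.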
        cfg = copy.deepcopy(self.iter_based_cfg)
        cfg.experiment_name = 'test_checkpoint11'
        cfg.resume = True
        runner = Runner.from_cfg(cfg)
        runner.load_or_resume()
        self.assertEqual(runner.epoch, 0)
        self.assertEqual(runner.iter, 12)
        self.assertTrue(runner._has_loaded)
        self.assertIsInstance(runner.optim_wrapper.optimizer, SGD)
        self.assertIsInstance(runner.param_schedulers[0], MultiStepLR)

        # 2.5 test resume from a specified checkpoint
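        # When both `resume` and `load_from` are set, resuming should start
        # from the specified checkpoint rather than the latest one.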
        cfg = copy.deepcopy(self.iter_based_cfg)
        cfg.experiment_name = 'test_checkpoint12'
        cfg.resume = True
        cfg.load_from = osp.join(self.temp_dir, 'iter_3.pth')
        runner = Runner.from_cfg(cfg)
        runner.load_or_resume()
        self.assertEqual(runner.epoch, 0)
        self.assertEqual(runner.iter, 3)
        self.assertTrue(runner._has_loaded)
        self.assertIsInstance(runner.optim_wrapper.optimizer, SGD)
        self.assertIsInstance(runner.param_schedulers[0], MultiStepLR)
        # 2.6 test resumed message_hub has the history value.
        cfg = copy.deepcopy(self.iter_based_cfg)
        cfg.experiment_name = 'test_checkpoint13'
        cfg.resume = True
        cfg.load_from = osp.join(self.temp_dir, 'iter_3.pth')
        runner = Runner.from_cfg(cfg)
        runner.load_or_resume()
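        # iter_3.pth was saved after 3 iterations, so the resumed message hub
        # should already contain 3 logged learning-rate values.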
        assert len(runner.message_hub.log_scalars['train/lr'].data[1]) == 3
        assert len(MessageHub.get_current_instance().log_scalars['train/lr'].
                   data[1]) == 3

        # 2.7.1 test `resume` 2 optimizers and 1 scheduler list.
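        # `ToyMultipleOptimizerConstructor` builds one OptimWrapper per
        # submodule key (`linear1`/`linear2`); a single scheduler config is
        # replicated for each optimizer, so `param_schedulers` should be a
        # dict of scheduler lists after resuming.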
        path = osp.join(self.temp_dir, 'epoch_3.pth')
        optim_cfg = dict(
            linear1=dict(
                type='OptimWrapper', optimizer=dict(type='SGD', lr=0.01)),
            linear2=dict(
                type='OptimWrapper', optimizer=dict(type='Adam', lr=0.02)),
            constructor='ToyMultipleOptimizerConstructor')
        cfg = copy.deepcopy(self.epoch_based_cfg)
        cfg.experiment_name = 'test_checkpoint14'
        cfg.optim_wrapper = optim_cfg
        cfg.param_scheduler = dict(type='MultiStepLR', milestones=[1, 2, 3])
        cfg.model = dict(type='ToyGANModel')
        resumed_cfg = copy.deepcopy(cfg)
        runner = Runner.from_cfg(cfg)
        runner.train()
        resumed_cfg.experiment_name = 'test_checkpoint15'
        runner = Runner.from_cfg(resumed_cfg)
        runner.resume(path)
        self.assertEqual(len(runner.param_schedulers['linear1']), 1)
        self.assertEqual(len(runner.param_schedulers['linear2']), 1)
        self.assertIsInstance(runner.param_schedulers['linear1'][0],
                              MultiStepLR)
        self.assertIsInstance(runner.param_schedulers['linear2'][0],
                              MultiStepLR)

        # 2.7.2 test `resume` 2 optimizers and 2 scheduler list.
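        # Here each optimizer key gets its own scheduler config, and resuming
        # should rebuild the matching scheduler type per key.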
        cfg = copy.deepcopy(self.epoch_based_cfg)
        cfg.experiment_name = 'test_checkpoint16'
        cfg.optim_wrapper = optim_cfg
        cfg.param_scheduler = dict(
            linear1=dict(type='MultiStepLR', milestones=[1, 2, 3]),
            linear2=dict(type='StepLR', gamma=0.1, step_size=3))
        cfg.model = dict(type='ToyGANModel')
        resumed_cfg = copy.deepcopy(cfg)
        runner = Runner.from_cfg(cfg)
        runner.train()
        resumed_cfg.experiment_name = 'test_checkpoint17'
        runner = Runner.from_cfg(resumed_cfg)
        runner.resume(path)
        self.assertEqual(len(runner.param_schedulers['linear1']), 1)
        self.assertEqual(len(runner.param_schedulers['linear2']), 1)
        self.assertIsInstance(runner.param_schedulers['linear1'][0],
                              MultiStepLR)
        self.assertIsInstance(runner.param_schedulers['linear2'][0], StepLR)

        # 2.7.3 test `resume` 2 optimizers and 0 scheduler list.
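        # With `param_scheduler = None`, resuming must not create schedulers
        # from the checkpoint state.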
        cfg = copy.deepcopy(self.epoch_based_cfg)
        cfg.experiment_name = 'test_checkpoint18'
        cfg.optim_wrapper = optim_cfg
        cfg.model = dict(type='ToyGANModel')
        cfg.param_scheduler = None
        resumed_cfg = copy.deepcopy(cfg)
        runner = Runner.from_cfg(cfg)
        runner.train()
        resumed_cfg.experiment_name = 'test_checkpoint19'
        runner = Runner.from_cfg(resumed_cfg)
        runner.resume(path)
        self.assertIsNone(runner.param_schedulers)

    def test_build_runner(self):
        # Other cases are already covered by `test_build_from_cfg`,
        # so only test building a custom runner and the default runner here.
        # test custom runner
        cfg = copy.deepcopy(self.epoch_based_cfg)
        cfg.experiment_name = 'test_build_runner1'
        cfg.runner_type = 'CustomRunner'
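        # `runner_type` selects the registered runner class; without it,
        # RUNNERS.build falls back to the default `Runner` (see below).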
        assert isinstance(RUNNERS.build(cfg), CustomRunner)

        # test default runner
        cfg = copy.deepcopy(self.epoch_based_cfg)
        cfg.experiment_name = 'test_build_runner2'
        assert isinstance(RUNNERS.build(cfg), Runner)