diff --git a/mmengine/hooks/optimizer_hook.py b/mmengine/hooks/optimizer_hook.py
index 9107dbf02500e24d471271fd99a7fc1b29ad12fe..6a8f7c110a78bf4cc758022461191d6fb9525ee7 100644
--- a/mmengine/hooks/optimizer_hook.py
+++ b/mmengine/hooks/optimizer_hook.py
@@ -92,8 +92,8 @@ class OptimizerHook(Hook):
             grad_norm = self.clip_grads(runner.model.parameters())
             if grad_norm is not None:
                 # Add grad norm to the logger
-                runner.log_buffer.update({'grad_norm': float(grad_norm)},
-                                         runner.outputs['num_samples'])
+                runner.message_hub.update_scalar('train/grad_norm',
+                                                 float(grad_norm))
         runner.optimizer.step()
 
     def detect_anomalous_parameters(self, loss: torch.Tensor, runner) -> None:
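
For context: the change swaps the mmcv-style `runner.log_buffer.update(...)` call (which weighted values by `num_samples` when averaging) for mmengine's `MessageHub`, where scalars are pushed into a history buffer and read back by logger hooks. A minimal standalone sketch of that round trip, assuming mmengine's `MessageHub.get_current_instance()` / `update_scalar()` / `get_scalar()` API and a placeholder gradient-norm value:

```python
import torch
from mmengine.logging import MessageHub

# Grab the current hub, as the hook does via `runner.message_hub`.
message_hub = MessageHub.get_current_instance()

# Push the clipped gradient norm as a scalar under the 'train/' prefix.
grad_norm = torch.tensor(2.5)  # placeholder value, for illustration only
message_hub.update_scalar('train/grad_norm', float(grad_norm))

# A consumer (e.g. a logger hook) reads the latest value back
# from the scalar's HistoryBuffer.
print(message_hub.get_scalar('train/grad_norm').current())  # 2.5
```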