OptimizerHook: backpropagation and the optimizer step

`OptimizerHook` performs the actual parameter update: at the end of every training iteration (`after_train_iter`) it zeroes the gradients, backpropagates the loss, optionally clips the gradient norm (logging it), and then steps the optimizer.
```python
from torch.nn.utils import clip_grad

from mmcv.runner import HOOKS, Hook


@HOOKS.register_module()
class OptimizerHook(Hook):

    def __init__(self, grad_clip=None):
        self.grad_clip = grad_clip

    def clip_grads(self, params):
        # Only clip parameters that require grad and actually received one.
        params = list(
            filter(lambda p: p.requires_grad and p.grad is not None, params))
        if len(params) > 0:
            # grad_clip is forwarded as keyword arguments to clip_grad_norm_.
            return clip_grad.clip_grad_norm_(params, **self.grad_clip)

    def after_train_iter(self, runner):
        # Standard training step: zero grads, backpropagate, optionally
        # clip gradients, then update the parameters.
        runner.optimizer.zero_grad()
        runner.outputs['loss'].backward()
        if self.grad_clip is not None:
            grad_norm = self.clip_grads(runner.model.parameters())
            if grad_norm is not None:
                # Add grad norm to the logger
                runner.log_buffer.update({'grad_norm': float(grad_norm)},
                                         runner.outputs['num_samples'])
        runner.optimizer.step()
```
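For completeness, here is a minimal usage sketch. It assumes mmcv's public runner API (`OptimizerHook` importable from `mmcv.runner`, hooks attached via `runner.register_hook`); the `max_norm=35, norm_type=2` values are purely illustrative, since `grad_clip` is forwarded verbatim as keyword arguments to `torch.nn.utils.clip_grad.clip_grad_norm_`.

```python
from mmcv.runner import OptimizerHook

# grad_clip is passed through to clip_grad_norm_, so it accepts the same
# keyword arguments; the values below are illustrative, not prescribed.
optimizer_hook = OptimizerHook(grad_clip=dict(max_norm=35, norm_type=2))

# The hook is attached to a runner, which then invokes after_train_iter
# at the end of every training iteration:
# runner.register_hook(optimizer_hook)
```

With `grad_clip=None` (the default), the hook skips clipping entirely and only performs zero_grad/backward/step, so gradient clipping is opt-in.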
