PyTorch custom autograd functions
Introduction: https://zhuanlan.zhihu.com/p/344802526
The core is to subclass torch.autograd.Function and implement the static methods forward and backward, as in the minimal sketch below.
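A toy element-wise square (my own illustration, not from the referenced repo) shows the pattern: forward stashes whatever backward needs via ctx, and backward returns one gradient per forward input.

```python
import torch

class Square(torch.autograd.Function):
    @staticmethod
    def forward(ctx, x):
        # Save the input; backward will need it.
        ctx.save_for_backward(x)
        return x * x

    @staticmethod
    def backward(ctx, grad_output):
        # d(x^2)/dx = 2x, chained with the incoming gradient.
        (x,) = ctx.saved_tensors
        return grad_output * 2 * x

x = torch.randn(3, requires_grad=True)
Square.apply(x).sum().backward()
print(torch.allclose(x.grad, 2 * x))  # True
```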
Example: LayerNorm. Reference implementation: https://github.com/zhangyi-3/KBNet/blob/main/basicsr/models/archs/kb_utils.py
Derivation of the gradients: https://blog.csdn.net/qinduohao333/article/details/132309091
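In short (my own restatement of the linked derivation, not quoted from it): per spatial location, with y = (x − μ)/√(σ² + ε) normalized over the C channels and g = grad_output · weight,

```latex
\frac{\partial L}{\partial x}
  = \frac{1}{\sqrt{\sigma^{2}+\epsilon}}
    \Bigl( g - y \,\mathrm{mean}_{C}(g\,y) - \mathrm{mean}_{C}(g) \Bigr),
\quad
\frac{\partial L}{\partial w} = \sum_{N,H,W} \frac{\partial L}{\partial \hat{y}}\, y,
\quad
\frac{\partial L}{\partial b} = \sum_{N,H,W} \frac{\partial L}{\partial \hat{y}},
```

where ŷ = w·y + b is the affine output. The backward pass in the code below computes exactly these three terms (gx, the weight gradient, the bias gradient).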
```python
import torch
import torch.nn as nn

class LayerNormFunction(torch.autograd.Function):

    @staticmethod
    def forward(ctx, x, weight, bias, eps):
        ctx.eps = eps
        N, C, H, W = x.size()
        # Normalize over the channel dimension (dim 1).
        mu = x.mean(1, keepdim=True)
        var = (x - mu).pow(2).mean(1, keepdim=True)
        y = (x - mu) / (var + eps).sqrt()
        # Make tensors contiguous to avoid CUDA errors.
        weight, bias, y = weight.contiguous(), bias.contiguous(), y.contiguous()
        ctx.save_for_backward(y, var, weight)
        y = weight.view(1, C, 1, 1) * y + bias.view(1, C, 1, 1)
        return y

    @staticmethod
    def backward(ctx, grad_output):
        eps = ctx.eps
        N, C, H, W = grad_output.size()
        y, var, weight = ctx.saved_tensors
        # Gradient w.r.t. the normalized activations.
        g = grad_output * weight.view(1, C, 1, 1)
        mean_g = g.mean(dim=1, keepdim=True)
        mean_gy = (g * y).mean(dim=1, keepdim=True)
        # dL/dx = (g - y * mean_C(g*y) - mean_C(g)) / sqrt(var + eps)
        gx = 1. / torch.sqrt(var + eps) * (g - y * mean_gy - mean_g)
        # weight/bias grads reduce over N, H, W; the trailing None matches eps.
        return gx, (grad_output * y).sum(dim=(0, 2, 3)), grad_output.sum(dim=(0, 2, 3)), None

class LayerNorm2d(nn.Module):

    def __init__(self, channels, eps=1e-6, requires_grad=True):
        super(LayerNorm2d, self).__init__()
        self.register_parameter('weight', nn.Parameter(torch.ones(channels), requires_grad=requires_grad))
        self.register_parameter('bias', nn.Parameter(torch.zeros(channels), requires_grad=requires_grad))
        self.eps = eps

    def forward(self, x):
        return LayerNormFunction.apply(x, self.weight, self.bias, self.eps)
```
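
A quick way to sanity-check the hand-written backward against numerical gradients (a minimal sketch: gradcheck requires double precision, and the non-tensor eps is passed through as a constant):

```python
import torch

x = torch.randn(2, 4, 5, 5, dtype=torch.double, requires_grad=True)
w = torch.randn(4, dtype=torch.double, requires_grad=True)
b = torch.randn(4, dtype=torch.double, requires_grad=True)
# gradcheck compares the analytic backward with finite differences.
print(torch.autograd.gradcheck(LayerNormFunction.apply, (x, w, b, 1e-6)))  # True
```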