Custom Layers

Construct a custom layer without any parameters

In [2]:
import torch
import torch.nn.functional as F
from torch import nn

class CenteredLayer(nn.Module):
    """A parameter-free layer that subtracts the mean from its input."""
    def __init__(self):
        super().__init__()

    def forward(self, X):
        # Center the input so the output has zero mean
        return X - X.mean()

layer = CenteredLayer()
layer(torch.FloatTensor([1, 2, 3, 4, 5]))
Out[2]:
tensor([-2., -1.,  0.,  1.,  2.])
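The layer subtracts the mean over all elements, so it accepts inputs of any shape. A quick sanity check (not part of the original notebook; the shape is chosen arbitrarily):

# Illustrative check: the centered output has (near-)zero mean for any input shape
X = torch.rand(4, 8)
print(layer(X).mean())  # expect a value very close to 0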

Incorporate the layer as a component when building more complex models

In [4]:
net = nn.Sequential(nn.Linear(8, 128), CenteredLayer())

Y = net(torch.rand(4, 8))
Y.mean()
Out[4]:
tensor(4.6566e-10, grad_fn=<MeanBackward0>)
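Because we are working with floating-point numbers, accumulated rounding error leaves a tiny nonzero value rather than an exact zero. A small check (not in the original notebook) that the mean is zero up to numerical precision:

# The mean is zero up to floating-point precision
print(torch.isclose(Y.mean(), torch.zeros(1), atol=1e-6))  # expect tensor([True])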

Layers with parameters

In [6]:
class MyLinear(nn.Module):
    def __init__(self, in_units, units):
        super().__init__()
        # Register the weight and bias as learnable parameters
        self.weight = nn.Parameter(torch.randn(in_units, units))
        self.bias = nn.Parameter(torch.randn(units,))

    def forward(self, X):
        # Use the parameters directly (not .data) so that gradients
        # can propagate back to them during training
        linear = torch.matmul(X, self.weight) + self.bias
        return F.relu(linear)

linear = MyLinear(5, 3)
linear.weight
Out[6]:
Parameter containing:
tensor([[-0.5359,  0.1707,  0.1999],
        [-1.7083,  0.9041,  0.1031],
        [-0.9424, -0.7027,  0.7929],
        [ 0.3570, -0.8159, -1.0664],
        [-2.1450, -0.1423,  1.0392]], requires_grad=True)
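Because both tensors were created with nn.Parameter, they are registered on the module automatically. A minimal check (not in the original notebook) that lists them by name:

# Registered parameters appear in named_parameters() and state_dict()
for name, param in linear.named_parameters():
    print(name, param.shape)
# expect: weight torch.Size([5, 3]) and bias torch.Size([3])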

Perform forward propagation directly with the custom layer

In [7]:
linear(torch.rand(2, 5))
Out[7]:
tensor([[0.0000, 0.0000, 1.5329],
        [0.0000, 0.0000, 1.4804]])
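Since the forward computation uses the registered parameters directly, gradients reach them during backpropagation. A minimal sketch (not in the original notebook) verifying this:

# Gradients flow back to the custom layer's parameters
out = linear(torch.rand(2, 5))
out.sum().backward()
print(linear.weight.grad.shape)  # expect torch.Size([5, 3])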

Build models using custom layers

In [8]:
net = nn.Sequential(MyLinear(64, 8), MyLinear(8, 1))
net(torch.rand(2, 64))
Out[8]:
tensor([[0.],
        [0.]])
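Custom layers also compose with the usual training machinery. A minimal single-step sketch, assuming a made-up regression target and a squared-error loss (both chosen purely for illustration):

# One illustrative SGD step over the composed model
X, y = torch.rand(2, 64), torch.rand(2, 1)
optimizer = torch.optim.SGD(net.parameters(), lr=0.1)
loss = F.mse_loss(net(X), y)
optimizer.zero_grad()
loss.backward()
optimizer.step()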