import torch
import torch.nn as nn
def train_conv(X, Y, lr=3e-2, num_epochs=10, kernel_size=(1, 2)):
    """Learn a single-channel convolution kernel by gradient descent.

    Trains an ``nn.Conv2d(1, 1, kernel_size, bias=False)`` layer so that
    ``conv2d(X)`` approximates ``Y`` under a summed squared-error loss,
    using manual SGD updates on the kernel weights.

    Args:
        X: input tensor of shape (batch, channels, height, width);
           channels must be 1 to match the layer.
        Y: target tensor whose shape matches ``conv2d(X)``'s output.
        lr: SGD learning rate for the manual weight update.
        num_epochs: number of gradient-descent steps.
        kernel_size: (height, width) of the learned kernel.

    Returns:
        List of per-epoch summed losses (Python floats), length num_epochs.
    """
    # PyTorch randomly initializes the kernel weights with its default
    # scheme (Kaiming-uniform for Conv2d); kernel tensor shape here is
    # (out_channels, in_channels, *kernel_size) = (1, 1, 1, 2) by default.
    conv2d = nn.Conv2d(1, 1, kernel_size=kernel_size, bias=False)
    print(conv2d.weight)  # initial (random) kernel weights

    losses = []
    for epoch in range(num_epochs):
        y_pred = conv2d(X)
        loss = (y_pred - Y) ** 2
        # Clear old gradients before backprop, otherwise they accumulate.
        conv2d.zero_grad()
        loss.sum().backward()
        # Manual SGD step; no_grad() is the supported way to mutate
        # parameters in place without tracking the update in autograd.
        with torch.no_grad():
            conv2d.weight -= lr * conv2d.weight.grad
        losses.append(loss.sum().item())
        print('epoch{} loss{}'.format(epoch + 1, loss.sum()))
    return losses


if __name__ == "__main__":
    # (batch size, channels, height, width)
    X = torch.rand((6, 8)).reshape((1, 1, 6, 8))
    Y = torch.rand((6, 7)).reshape((1, 1, 6, 7))
    train_conv(X, Y, lr=3e-2, num_epochs=10)
# Tags: loss, weight, initialization, convolution, torch, learning, conv2d
# Source: https://www.cnblogs.com/morehair/p/18379241