Implementing a 2D Convolutional Layer

Published 2023-08-06 14:31:42 · Author: 不像话
import torch
from torch import nn

def corr2d(x, k):
    """Compute the 2D cross-correlation of input x with kernel k."""
    # Height and width of the kernel
    h, w = k.shape
    # The output shrinks to (x_h - h + 1, x_w - w + 1)
    y = torch.zeros((x.shape[0] - h + 1, x.shape[1] - w + 1))
    for i in range(y.shape[0]):
        for j in range(y.shape[1]):
            # Element-wise multiply the window by the kernel, then sum
            y[i, j] = (x[i:i + h, j:j + w] * k).sum()
    return y
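# Sanity check (an addition for illustration, not in the original post): run
# corr2d on the small example from the d2l book; the expected output is
# [[19., 25.], [37., 43.]].
X = torch.tensor([[0.0, 1.0, 2.0], [3.0, 4.0, 5.0], [6.0, 7.0, 8.0]])
K = torch.tensor([[0.0, 1.0], [2.0, 3.0]])
print(corr2d(X, K))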

class Conv2D(nn.Module):
    """A 2D convolutional layer built on corr2d, with a learnable kernel and bias."""
    def __init__(self, kernel_size):
        super().__init__()
        self.weight = nn.Parameter(torch.rand(kernel_size))
        self.bias = nn.Parameter(torch.zeros(1))

    def forward(self, x):
        return corr2d(x, self.weight) + self.bias
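# A minimal usage sketch of the custom layer (added for illustration): with a
# randomly initialized (1, 2) kernel, a (6, 8) input yields a (6, 7) output,
# matching the shape corr2d produces.
layer = Conv2D(kernel_size=(1, 2))
print(layer(torch.ones((6, 8))).shape)  # torch.Size([6, 7])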

# Edge detection: a 6x8 image of ones with a black (zero) band in columns 2-5
x = torch.ones((6, 8))
x[:, 2:6] = 0
# A 1x2 kernel that responds to horizontal change: +1 at a 1-to-0 edge, -1 at a 0-to-1 edge
k = torch.tensor([[1.0, -1.0]])
y = corr2d(x, k)
print(y)
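# Side note (following the d2l book, added here for illustration): this kernel
# only detects vertical edges. On the transposed input the edges run
# horizontally, so the output is all zeros.
print(corr2d(x.t(), k))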

# Learn the kernel that maps x to y
# 1 input channel, 1 output channel
conv2d = nn.Conv2d(1, 1, kernel_size=(1, 2), bias=False)
# nn.Conv2d expects 4D input: (batch size, channels, height, width)
x = x.reshape((1, 1, 6, 8))
y = y.reshape((1, 1, 6, 7))

print(x)
print(y)

for i in range(10):
    y_hat = conv2d(x)
    print('y_hat:', y_hat)
    print('y:', y)
    # Squared error as the loss
    l = (y_hat - y) ** 2
    print('l:', l)
    conv2d.zero_grad()
    l.sum().backward()
    # Manual gradient-descent step on the kernel weights (lr = 3e-2)
    conv2d.weight.data[:] -= 3e-2 * conv2d.weight.grad
    print('conv2d.weight.grad:', conv2d.weight.grad)
    print(conv2d.weight.data[:])
    if (i + 1) % 2 == 0:
        print(f'batch {i + 1}, loss {l.sum():.2f}')

# The learned kernel should be close to the target k = [[1.0, -1.0]]
print(conv2d.weight.data.reshape((1, 2)))
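# Optional check (an addition for illustration, not in the original post):
# after 10 gradient steps at lr = 3e-2 the kernel is usually within ~0.1 of
# the target, though this depends on the random initialization.
print(torch.allclose(conv2d.weight.data.reshape((1, 2)), k, atol=1e-1))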