可视化全连接层(蒙特卡洛法)

发布时间 2023-06-01 18:15:26作者: 澳大利亚树袋熊
import random
import torch
import torch.nn as nn
import matplotlib.pyplot as plt
import numpy as np
import math
from torch.utils.data import DataLoader
from torch.utils.data import Dataset

epochs=1000  # number of full passes over the 360-sample circle dataset
class pt:
    """Minimal 2-D point container (defined but not used elsewhere in this script)."""

    def __init__(self, x, y):
        # Plain attribute storage for the two coordinates.
        self.x = x
        self.y = y

class logistic(nn.Module):
    """Binary classifier on 2-D points: a two-layer MLP with a ReLU in between.

    ``forward`` returns ``(sigmoid(logits), logits)``; the raw logits are kept
    so the caller can threshold them (e.g. ``pred < 0``) when visualizing the
    decision boundary.

    Args:
        hidden: width of the hidden layer. Defaults to 5000, the value the
            original experiment used to demonstrate that wider layers fit
            the circle dataset better.
    """

    def __init__(self, hidden=5000):
        super(logistic, self).__init__()
        # Legacy hand-rolled linear-model parameters. They are never used in
        # forward() but are kept so the module's attribute set (and hence
        # md.parameters()) stays backward-compatible.
        self.w = torch.nn.Parameter(torch.randn(2, 1))
        self.b = torch.nn.Parameter(torch.zeros([1]))
        self.line1 = torch.nn.Linear(2, 1)  # also unused in forward()
        # The actual model: 2 -> hidden -> 1 MLP with a ReLU nonlinearity.
        self.line2 = nn.Sequential(
            nn.Linear(2, hidden),
            nn.ReLU(),
            nn.Linear(hidden, 1),
        )
        self.pred = None  # last raw logits, cached for inspection

    def forward(self, X):
        """Return ``(probabilities, logits)`` for input points ``X``."""
        self.pred = self.line2(X)
        return torch.sigmoid(self.pred), self.pred
def generate_point(th=0.4, start=50, end=90):
    """Sample 360 noisy points on the unit circle and label three arcs as class 0.

    One point is generated per integer degree. Each point is the unit-circle
    position plus uniform noise in ``[0, th)`` on both axes. A point's label
    depends only on its angle, so labels are deterministic:

    * class 0: angles in ``(start, end)``, ``(180, 230)`` or ``(250, 300)``
    * class 1: everything else

    Args:
        th: amplitude of the uniform radial noise.
        start, end: open interval of degrees for the first class-0 arc.

    Returns:
        ``(points, labels)`` — a ``(360, 2)`` float array and a ``(360,)``
        int array of 0/1 labels.
    """
    class_list = []
    point_list = []
    for angle in range(360):
        # math.pi replaces the original low-precision literal 3.14
        theta = math.pi / 180.0 * angle
        x = math.cos(theta) + random.random() * th
        y = math.sin(theta) + random.random() * th
        point_list.append([x, y])
        if (start < angle < end) or (180 < angle < 230) or (250 < angle < 300):
            class_list.append(0)
        else:
            class_list.append(1)
    return np.array(point_list), np.array(class_list)

class fdata(Dataset):
    """Dataset wrapping the synthetic circle points from ``generate_point``.

    Each item is a ``(1, 2)`` float32 point tensor paired with a ``(1, 1)``
    float32 label tensor, shaped to match the model's expected input.
    """

    def __init__(self, train=True):
        # ``train`` is accepted for API symmetry but both splits are
        # generated identically here.
        super(fdata, self).__init__()
        self.data, self.label = generate_point()

    def __len__(self):
        # One sample per degree of the circle (360 total).
        return self.data.shape[0]

    def __getitem__(self, index):
        point = torch.from_numpy(self.data[index].astype(np.float32))
        point = torch.unsqueeze(point, 0)  # (2,) -> (1, 2)
        target = np.array(self.label[index].astype(np.float32)).reshape(1)
        target = torch.unsqueeze(torch.from_numpy(target), 0)  # (1,) -> (1, 1)
        return point, target


def draw(pt_list, cls_list, module):
    """Scatter-plot the dataset and the model's learned class-0 region.

    A regular grid over roughly [-1, 2) x [-1, 2) is pushed through
    ``module``; grid cells whose raw logit is negative (predicted class 0)
    are drawn in green. The real points are drawn red (class 1) and blue
    (class 0). Shows the figure; returns nothing.
    """
    plt.title('circle')

    # Split the labelled points by class.
    pt_r = np.array([pt_list[i] for i in range(len(cls_list)) if cls_list[i] == 1])
    pt_b = np.array([pt_list[i] for i in range(len(cls_list)) if cls_list[i] != 1])

    # Monte-Carlo-style probe grid with 0.1 spacing over [-1.0, 1.9].
    grid = [[i * 0.1, j * 0.1] for i in range(-10, 20) for j in range(-10, 20)]

    grid_tensor = torch.from_numpy(np.array(grid)).reshape(-1, 1, 2).float()
    _, logits = module(grid_tensor)
    logits = logits.squeeze().detach().numpy().tolist()

    # Keep the grid cells the model assigns to class 0 (logit < 0).
    region = np.array([grid[i] for i in range(len(logits)) if logits[i] < 0])

    plt.scatter(region[:, 0], region[:, 1], c="g")
    plt.scatter(pt_r[:, 0], pt_r[:, 1], c="r")
    plt.scatter(pt_b[:, 0], pt_b[:, 1], c="b")
    plt.xlim(-1, 2)
    plt.ylim(-1, 2)
    plt.show()

# ---- training script ----------------------------------------------------
# BCE loss on the sigmoid outputs; the raw logits are only used by draw().
criterion = nn.BCELoss()
md = logistic()
opt = torch.optim.Adam(md.parameters(), lr=0.001)
pt_list, cls_list = generate_point()

train_dataset = fdata()
train_dataloder = DataLoader(train_dataset, batch_size=10,
                             num_workers=0, drop_last=True, shuffle=True)
for i in range(epochs):
    # Renamed from 'input'/'label': 'input' shadowed the builtin.
    for batch_x, batch_y in train_dataloder:
        output, pred = md(batch_x)
        loss = criterion(output, batch_y)
        opt.zero_grad()
        loss.backward()
        opt.step()
        # .item() prints the scalar loss instead of the full tensor repr.
        print(f"{i}:{loss.item()}")
draw(pt_list, cls_list, md)

这个实验揭示了一个结果:带有激活函数的全连接层(至少两层)越宽,其拟合能力越强。

其实我不是很明白:为什么“低维输入先经过线性映射升到高维、再经过激活函数之后,全连接层就具有了很强的非线性拟合能力”?