帮我给这段代码添加 Dropout 层来防止过拟合，使用 BatchNorm 层加快模型收敛速度，并保持清晰的代码结构。
import torch
import torch.nn as nn
import torch.nn.functional as F
class LeNet(nn.Module):
    """LeNet-5 variant with BatchNorm (faster convergence) and Dropout (regularization).

    Expects input of shape ``(N, 1, 28, 28)`` — the ``padding=2`` on conv1 and the
    5x5 kernel of conv3 only line up for 28x28 inputs (e.g. MNIST) — TODO confirm
    against the caller. Returns raw class logits of shape ``(N, 10)``.
    """

    def __init__(self):
        # BUG FIX: the original defined `def init` and called `super(...).init()`,
        # so nn.Module.__init__ never ran and no layers were registered.
        super().__init__()
        # Block 1: 1 -> 6 channels, padded conv keeps 28x28, pool halves to 14x14.
        self.conv1 = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=6, kernel_size=5, stride=1, padding=2),
            nn.BatchNorm2d(6),
            nn.ReLU(),
            nn.Dropout(0.5),
            nn.AvgPool2d(kernel_size=2, stride=2),
        )
        # Block 2: 6 -> 16 channels, unpadded conv 14x14 -> 10x10, pool to 5x5.
        self.conv2 = nn.Sequential(
            nn.Conv2d(in_channels=6, out_channels=16, kernel_size=5, stride=1),
            nn.BatchNorm2d(16),
            nn.ReLU(),
            nn.Dropout(0.5),
            nn.AvgPool2d(kernel_size=2, stride=2),
        )
        # Block 3: 16 -> 120 channels; 5x5 kernel collapses the 5x5 map to 1x1.
        self.conv3 = nn.Sequential(
            nn.Conv2d(in_channels=16, out_channels=120, kernel_size=5),
            nn.BatchNorm2d(120),
            nn.ReLU(),
        )
        # Classifier head: 120 -> 84 with BatchNorm1d + Dropout, then 84 -> 10 logits.
        self.fc1 = nn.Sequential(
            nn.Linear(120, 84),
            nn.BatchNorm1d(84),
            nn.ReLU(),
            nn.Dropout(0.5),
        )
        self.fc2 = nn.Linear(84, 10)

    def forward(self, x):
        """Forward pass: ``(N, 1, 28, 28)`` tensor -> ``(N, 10)`` logits."""
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        x = x.view(x.size(0), -1)  # flatten (N, 120, 1, 1) -> (N, 120)
        x = self.fc1(x)
        x = self.fc2(x)
        # BUG FIX: the original ended with a bare `return`, yielding None.
        return x
原文地址: https://www.cveoy.top/t/topic/cTrW 著作权归作者所有。请勿转载和采集!