import torch
import torch.nn as nn
import torch.nn.functional as F
class DenseLayer(nn.Module):
    """DenseNet bottleneck layer.

    Applies BN -> ReLU -> 1x1 conv (bottleneck) -> BN -> ReLU -> 3x3 conv,
    optionally followed by dropout, then concatenates the ``growth_rate``
    new feature maps onto the input along the channel dimension.

    Args:
        num_input_features: channels of the incoming tensor.
        growth_rate: number of new feature maps produced per layer.
        bn_size: bottleneck width multiplier (1x1 conv emits
            ``bn_size * growth_rate`` channels).
        drop_rate: dropout probability applied to the new features
            (skipped entirely when <= 0).
    """

    def __init__(self, num_input_features, growth_rate, bn_size, drop_rate):
        super(DenseLayer, self).__init__()
        bottleneck_channels = bn_size * growth_rate
        # 1x1 bottleneck branch
        self.norm1 = nn.BatchNorm2d(num_input_features)
        self.relu1 = nn.ReLU(inplace=True)
        self.conv1 = nn.Conv2d(num_input_features, bottleneck_channels,
                               kernel_size=1, stride=1, bias=False)
        # 3x3 conv producing the growth_rate new feature maps
        self.norm2 = nn.BatchNorm2d(bottleneck_channels)
        self.relu2 = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(bottleneck_channels, growth_rate,
                               kernel_size=3, stride=1, padding=1, bias=False)
        self.drop_rate = drop_rate

    def forward(self, x):
        out = x
        # Run the BN-ReLU-Conv stages in sequence.
        for stage in (self.norm1, self.relu1, self.conv1,
                      self.norm2, self.relu2, self.conv2):
            out = stage(out)
        if self.drop_rate > 0:
            out = F.dropout(out, p=self.drop_rate, training=self.training)
        # Dense connectivity: append the new features to the input channels.
        return torch.cat([x, out], 1)
class Net(nn.Module):
    """Small DenseNet-style CNN classifier emitting 10 logits.

    Layout assumes a single-channel 28x28 input (e.g. MNIST) so that the
    final AvgPool2d(7) sees a 7x7 map — TODO confirm against the caller.
    Feature path: stem conv -> pool -> DenseLayer (64+32=96 ch) -> pool ->
    DenseLayer (96+32=128 ch) -> global 7x7 average pool; a two-layer MLP
    head maps the 128-dim vector to class scores.
    """

    def __init__(self):
        super(Net, self).__init__()
        # Feature extractor: conv stem plus two dense layers with pooling.
        self.seq = nn.Sequential(
            nn.Conv2d(1, 64, 3, 1, 1, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),
            nn.AvgPool2d(2),              # 28x28 -> 14x14
            DenseLayer(64, 32, 4, 0),     # -> 96 channels
            nn.AvgPool2d(2),              # 14x14 -> 7x7
            DenseLayer(96, 32, 4, 0),     # -> 128 channels
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),
            nn.AvgPool2d(7),              # 7x7 -> 1x1 (global average)
        )
        # Classifier head over the pooled 128-dim feature vector.
        self.classer = nn.Sequential(
            nn.Linear(128, 256),
            nn.BatchNorm1d(256),
            nn.ReLU(inplace=True),
            nn.Linear(256, 10),
        )

    def forward(self, x):
        features = self.seq(x)
        flat = features.view(features.size(0), -1)
        return self.classer(flat)
# NOTE(review): removed stray web-scrape artifact "评论0" ("0 comments" page footer) —
# it was a bare identifier at module level and would raise NameError on import.