This commit is contained in:
yanjie 2021-10-27 21:58:09 +08:00
parent c032d28a5c
commit 9f6a280d62
1 changed file with 154 additions and 0 deletions

154
Densenet.py Normal file
View File

@ -0,0 +1,154 @@
import os

# Must be set before tensorlayer is imported so TL picks the right backend.
os.environ['TL_BACKEND'] = 'tensorflow'

import time
from typing import Any

import tensorlayer as tl
from tensorlayer import logging
from tensorlayer.files import (
    assign_weights,
    load_cifar10_dataset,
)
from tensorlayer.layers import (
    AdaptiveMeanPool2d,
    BatchNorm,
    Concat,
    Conv2d,
    Dense,
    Elementwise,
    Flatten,
    GlobalMeanPool2d,
    Input,
    MaxPool2d,
    MeanPool2d,
    Module,
    SequentialLayer,
)
from torchsummary import summary
class _DenseLayer(Module):
    """One DenseNet layer: BN-ReLU-Conv1x1 (bottleneck) then BN-ReLU-Conv3x3.

    The layer produces ``growth_rate`` new feature maps and concatenates them
    onto its input along the channel axis, so the output has
    ``in_channels + growth_rate`` channels.

    Args:
        in_channels: number of channels in the incoming feature maps.
        growth_rate: number of new feature maps this layer adds.
        bn_size: bottleneck width multiplier (1x1 conv emits
            ``bn_size * growth_rate`` channels).
        memory_efficient: accepted for API compatibility; checkpointing is
            not implemented here.
    """

    def __init__(self, in_channels, growth_rate, bn_size, memory_efficient=False):
        super(_DenseLayer, self).__init__()
        # bn1 normalizes the *incoming* maps, so it must be sized to
        # in_channels (was wrongly bn_size * growth_rate).
        self.bn1 = BatchNorm(num_features=in_channels, act='relu')
        self.conv1 = Conv2d(
            growth_rate * bn_size, (1, 1), in_channels=in_channels,
            strides=(1, 1), padding='SAME', b_init=None,
        )
        # bn2 normalizes the bottleneck output, which has
        # bn_size * growth_rate channels (was wrongly growth_rate).
        self.bn2 = BatchNorm(num_features=bn_size * growth_rate, act='relu')
        self.conv2 = Conv2d(
            growth_rate, (3, 3), in_channels=bn_size * growth_rate,
            strides=(1, 1), padding='SAME', b_init=None,
        )
        # DenseNet *concatenates* new features onto the input; the original
        # Elementwise(tl.add) was a ResNet-style add and could not even run:
        # inputs has in_channels channels while the branch emits growth_rate.
        # Concat also makes the `in_channels + i * growth_rate` bookkeeping
        # in _DenseBlock correct.
        self.concat = Concat(concat_dim=-1)

    def forward(self, inputs):
        new_features = self.bn1(inputs)
        new_features = self.conv1(new_features)
        new_features = self.bn2(new_features)
        new_features = self.conv2(new_features)
        return self.concat([inputs, new_features])
class _DenseBlock(Module):
    """A stack of ``num_layers`` dense layers.

    Layer ``i`` receives ``in_channels + i * growth_rate`` channels (each
    dense layer concatenates ``growth_rate`` new maps onto its input), so the
    block's output has ``in_channels + num_layers * growth_rate`` channels.

    Args:
        num_layers: how many _DenseLayer instances to stack.
        in_channels: channel count entering the block.
        bn_size: bottleneck multiplier forwarded to each _DenseLayer.
        growth_rate: feature maps added per layer.
    """

    def __init__(self, num_layers, in_channels, bn_size, growth_rate):
        super(_DenseBlock, self).__init__()
        self.layers = [
            _DenseLayer(in_channels + i * growth_rate, growth_rate, bn_size)
            for i in range(num_layers)
        ]
        self.dense_block = SequentialLayer(self.layers)

    def forward(self, inputs):
        # Bug fix: the original forward() took no input and returned the
        # SequentialLayer object itself instead of applying it to the data.
        return self.dense_block(inputs)
class _Transition(Module):
    """Transition between dense blocks: BN-ReLU, 1x1 conv, 2x2 average pool.

    Reduces the channel count from ``num_input_features`` to
    ``num_output_features`` and halves the spatial resolution.

    Args:
        num_input_features: channels entering the transition.
        num_output_features: channels after the 1x1 convolution.
    """

    def __init__(self, num_input_features, num_output_features):
        super(_Transition, self).__init__()
        # BN runs on the *incoming* maps, so it is sized to the input channel
        # count (was wrongly num_output_features).
        self.bn1 = BatchNorm(num_features=num_input_features, act='relu')
        # The transition uses a 1x1 convolution; the original omitted the
        # filter size and fell back on the layer default.
        self.conv1 = Conv2d(
            num_output_features, (1, 1), in_channels=num_input_features,
            strides=(1, 1), padding='SAME', b_init=None,
        )
        self.pooling = MeanPool2d((2, 2), strides=(2, 2), padding='SAME')

    def forward(self, inputs):
        # Bug fix: the original called self.bn, which was never defined
        # (the attribute is bn1) and raised AttributeError at runtime.
        output = self.bn1(inputs)
        output = self.conv1(output)
        output = self.pooling(output)
        return output
class Densenet(Module):
    """DenseNet backbone (Huang et al., 2017).

    Stem (7x7 conv, BN+ReLU, 3x3 max-pool) followed by alternating dense
    blocks and channel-halving transitions, a final BN+ReLU, global average
    pooling, and a linear classifier.

    Args:
        growth_rate: feature maps added by each dense layer.
        block_config: number of dense layers in each of the blocks.
        bn_size: bottleneck multiplier inside each dense layer.
        num_classes: output units of the classifier head.
        num_init_features: channels produced by the stem convolution.
    """

    def __init__(self, growth_rate=12, block_config=(6, 12, 24, 16),
                 bn_size=4, num_classes=10, num_init_features=24):
        super(Densenet, self).__init__()
        # (Removed dead local `num_init_feature = 2 * growth_rate`; it was
        # computed and never used.)
        # Stem. The reference architecture pads the 7x7 stride-2 conv, so use
        # 'SAME' — the original lowercase "valid" also broke the file's
        # uppercase padding convention.
        self.conv1 = Conv2d(num_init_features, (7, 7), strides=(2, 2),
                            padding='SAME', in_channels=3)
        self.bn1 = BatchNorm(num_features=num_init_features, act='relu')
        self.pooling = MaxPool2d((3, 3), strides=(2, 2))

        # Dense blocks interleaved with transitions; track the running
        # channel count so each stage is sized correctly.
        self.layers_list = []
        num_input_features = num_init_features
        for i, num_layers in enumerate(block_config):
            self.layers_list.append(
                _DenseBlock(num_layers, num_input_features, bn_size, growth_rate)
            )
            num_input_features += growth_rate * num_layers
            if i != len(block_config) - 1:
                # Halve the channels between blocks (no transition after the last).
                self.layers_list.append(
                    _Transition(num_input_features=num_input_features,
                                num_output_features=num_input_features // 2)
                )
                num_input_features //= 2
        self.layers_list.append(BatchNorm(num_features=num_input_features, act='relu'))
        self.layers_list.append(AdaptiveMeanPool2d((1, 1)))
        self.features = SequentialLayer(self.layers_list)

        # Collapse (batch, 1, 1, C) -> (batch, C) before the classifier.
        self.flatten = Flatten()
        self.classifier = Dense(in_channels=num_input_features, n_units=num_classes)

    def forward(self, x):
        output = self.conv1(x)
        output = self.bn1(output)
        output = self.pooling(output)
        output = self.features(output)
        # Bug fix: the original line `features.view(output.size(0), -1)`
        # referenced the undefined name `features` and used the torch-only
        # .view() API; a Flatten layer does the same reshape in TensorLayer.
        output = self.flatten(output)
        output = self.classifier(output)
        return output
def DenseNet(s: str, pretrain: bool):
    """Build a named DenseNet variant.

    Args:
        s: variant name, one of ``'densenet-100'`` or ``'densenet-121'``.
        pretrain: accepted for API compatibility; pretrained weights are not
            implemented yet. TODO: load weights when pretrain is True.

    Returns:
        A freshly constructed :class:`Densenet`.

    Raises:
        ValueError: if ``s`` names no known variant (the original silently
            returned None, which deferred the failure to the caller).
    """
    if s == 'densenet-100':
        return Densenet(growth_rate=32, block_config=(6, 12, 18, 16),
                        num_init_features=64, num_classes=10)
    if s == 'densenet-121':
        return Densenet(growth_rate=32, block_config=(6, 12, 24, 16),
                        num_init_features=64, num_classes=1000)
    raise ValueError(f"unknown DenseNet variant: {s!r}")
def densenet121(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> Densenet:
    r"""Densenet-121 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_.
    The required minimum input size of the model is 29x29.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
            (not implemented yet; accepted for API compatibility).
        progress (bool): If True, displays a progress bar of the download to stderr
            (not implemented yet; accepted for API compatibility).
        **kwargs: forwarded to :class:`Densenet` (e.g. ``bn_size``).
    """
    # Bug fix: the original called undefined lowercase `densenet(...)`
    # (NameError) and took no parameters despite documenting two. The sibling
    # builders' signature is adopted for consistency; adding defaulted
    # parameters is backward-compatible for existing zero-arg callers.
    return Densenet(growth_rate=32, block_config=(6, 12, 24, 16),
                    num_init_features=64, num_classes=1000, **kwargs)
def densenet161(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> Densenet:
    r"""Densenet-161 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_.
    The required minimum input size of the model is 29x29.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
            (not implemented yet; accepted for API compatibility).
        progress (bool): If True, displays a progress bar of the download to stderr
            (not implemented yet; accepted for API compatibility).
        **kwargs: forwarded to :class:`Densenet` (e.g. ``bn_size``).
    """
    # Bug fixes: the original called undefined lowercase `densenet(...)`
    # (NameError), annotated the return as the DenseNet *factory function*
    # instead of the Densenet class, and accepted **kwargs without
    # forwarding them.
    return Densenet(growth_rate=48, block_config=(6, 12, 36, 24),
                    num_init_features=96, num_classes=1000, **kwargs)
def densenet169(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> Densenet:
    r"""Densenet-169 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_.
    The required minimum input size of the model is 29x29.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
            (not implemented yet; accepted for API compatibility).
        progress (bool): If True, displays a progress bar of the download to stderr
            (not implemented yet; accepted for API compatibility).
        **kwargs: forwarded to :class:`Densenet` (e.g. ``bn_size``).
    """
    # Bug fixes: the original called undefined lowercase `densenet(...)`
    # (NameError), annotated the return as the DenseNet *factory function*
    # instead of the Densenet class, and accepted **kwargs without
    # forwarding them.
    return Densenet(growth_rate=32, block_config=(6, 12, 32, 32),
                    num_init_features=64, num_classes=1000, **kwargs)
def densenet201(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> Densenet:
    r"""Densenet-201 model from
    `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>`_.
    The required minimum input size of the model is 29x29.

    Args:
        pretrained (bool): If True, returns a model pre-trained on ImageNet
            (not implemented yet; accepted for API compatibility).
        progress (bool): If True, displays a progress bar of the download to stderr
            (not implemented yet; accepted for API compatibility).
        **kwargs: forwarded to :class:`Densenet` (e.g. ``bn_size``).
    """
    # Bug fixes: the original called undefined lowercase `densenet(...)`
    # (NameError), annotated the return as the DenseNet *factory function*
    # instead of the Densenet class, and accepted **kwargs without
    # forwarding them.
    return Densenet(growth_rate=32, block_config=(6, 12, 48, 32),
                    num_init_features=64, num_classes=1000, **kwargs)