import torch.nn.functional as F
import torch.nn as nn
from modeling.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d
from torchsummary import summary


def conv_bn(inp, oup, stride, BatchNorm):
    return nn.Sequential(
        nn.Conv2d(inp, oup, 3, stride, 1, bias=False),
        BatchNorm(oup),
        nn.ReLU6(inplace=True)
    )


class test(nn.Module):
    def __init__(self):
        super(test, self).__init__()
        input_channel = 32
        self.features = [conv_bn(3, input_channel, 2, SynchronizedBatchNorm2d)]
        self.features.append(conv_bn(input_channel, input_channel, 1, SynchronizedBatchNorm2d))
        self.features.append(conv_bn(input_channel, input_channel, 1, SynchronizedBatchNorm2d))
        self.features = nn.Sequential(*self.features)
        self.low_features = self.features[0]  # this one is weird
        # print(self.features)
        # print(self.low_features)

    def forward(self, x):
        x = self.features(x)
        print(x.shape)
        return x


b = test().cuda()
summary(b, (3, 960, 960))
----------------------------------------------------------------
Layer (type) Output Shape Param #
================================================================
Conv2d-1 [-1, 32, 480, 480] 864
Conv2d-2 [-1, 32, 480, 480] 864
SynchronizedBatchNorm2d-3 [-1, 32, 480, 480] 64
SynchronizedBatchNorm2d-4 [-1, 32, 480, 480] 64
ReLU6-5 [-1, 32, 480, 480] 0
ReLU6-6 [-1, 32, 480, 480] 0
Conv2d-7 [-1, 32, 480, 480] 9,216
SynchronizedBatchNorm2d-8 [-1, 32, 480, 480] 64
ReLU6-9 [-1, 32, 480, 480] 0
Conv2d-10 [-1, 32, 480, 480] 9,216
SynchronizedBatchNorm2d-11 [-1, 32, 480, 480] 64
ReLU6-12 [-1, 32, 480, 480] 0
================================================================
Total params: 20,416
Trainable params: 20,416
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 10.55
Forward/backward pass size (MB): 675.00
Params size (MB): 0.08
Estimated Total Size (MB): 685.62
As with the `self.low_features` line above, when I store a reference to part of the model in an attribute, layers I did not intend to duplicate show up twice in the summary.
I can't tell whether this is a bug in summary or something I'm missing.
Why does this happen?
Also, when it does happen, is there a way to check whether the duplicated layers actually affect the output?
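As a minimal sketch of the kind of check I have in mind (reusing the `b` model from the code above; the `make_hook` helper and the `calls` dict are just illustrative names), one could compare the model's real parameter count with the total that summary prints, and count how many times each leaf layer actually fires during a single forward pass:

import torch

# 1) The stored attribute is the very same module object, not a copy,
#    so its weights are shared rather than duplicated.
print(b.low_features is b.features[0])          # expected: True

# 2) The model's actual parameter count (PyTorch deduplicates shared
#    parameters); for this model it should be 19,488, which can be
#    compared with the 20,416 that summary reports.
print(sum(p.numel() for p in b.parameters()))

# 3) Count how often each leaf layer runs in one forward pass.
calls = {}

def make_hook(name):
    def hook(module, inp, out):
        calls[name] = calls.get(name, 0) + 1
    return hook

for name, m in b.named_modules():
    if len(list(m.children())) == 0:            # leaf modules only
        m.register_forward_hook(make_hook(name))

with torch.no_grad():
    b(torch.randn(1, 3, 960, 960).cuda())

print(calls)  # each layer should appear exactly once per forward pass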