ResNet Architecture
ResNet architecture (figure)
ResNet N-layer configurations (figure)
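For reference, the "50" in ResNet-50 counts only the weighted layers: the 7x7 stem convolution, three convolutions per bottleneck unit across the four stages (3 + 4 + 6 + 3 units), and the final fully connected layer:

1 + 3 × (3 + 4 + 6 + 3) + 1 = 1 + 48 + 1 = 50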
Implementation
import tensorflow as tf
from tensorflow.keras.models import Model, Sequential
from tensorflow.keras.layers import (
    Activation, BatchNormalization, Dense, Flatten, Add, Input,
    Conv2D, MaxPool2D, GlobalAvgPool2D,
    Concatenate
)
from tensorflow.keras.regularizers import l2
Defining the building blocks
BN-ReLU-Conv (Pre-Activation)
Block structure
def _bn_relu(inputs):
    x = BatchNormalization()(inputs)
    x = Activation('relu')(x)
    return x

def _conv(inputs, filters, kernel_size, strides=1, padding='same',
          kernel_initializer='he_normal', kernel_regularizer=l2(1.e-4)):
    x = Conv2D(filters, kernel_size, strides=strides, padding=padding,
               kernel_initializer=kernel_initializer,
               kernel_regularizer=kernel_regularizer)(inputs)
    return x
def _bn_relu_conv(inputs, filters, kernel_size, strides=1, padding='same',
                  kernel_initializer='he_normal', kernel_regularizer=l2(1.e-4)):
    # Pre-activation: BN and ReLU are applied before the convolution
    x = _bn_relu(inputs)
    x = _conv(x, filters, kernel_size, strides=strides, padding=padding,
              kernel_initializer=kernel_initializer,
              kernel_regularizer=kernel_regularizer)
    return x

def _conv_bn_relu(inputs, filters, kernel_size, strides=1, padding='same',
                  kernel_initializer='he_normal', kernel_regularizer=l2(1.e-4)):
    # Post-activation: convolution first, then BN and ReLU (used for the stem)
    x = _conv(inputs, filters, kernel_size, strides=strides, padding=padding,
              kernel_initializer=kernel_initializer,
              kernel_regularizer=kernel_regularizer)
    x = _bn_relu(x)
    return x

def bottleneck(inputs, filters, strides=1):
    # 1x1 convolution used for channel reduction/expansion and the projection shortcut
    x = _conv(inputs, filters, (1, 1), strides=strides, padding='valid')
    return x
def conv_block(inputs, filters, rep, is_first=False):
    # Projection shortcut: 1x1 conv (stride 2, except in the first stage) followed by BN
    shortcut = bottleneck(inputs, filters * 4, strides=(2 if not is_first else 1))
    shortcut = BatchNormalization()(shortcut)
    x = inputs
    for i in range(rep):
        # Only the first unit of a stage downsamples, and never in the first stage
        x = bottleneck(x, filters, strides=(2 if not is_first and i == 0 else 1))
        x = _bn_relu_conv(x, filters, (3, 3))
        x = _bn_relu_conv(x, filters * 4, (1, 1), padding='valid')
        x = BatchNormalization()(x)
        x = Add()([x, shortcut])
        x = Activation('relu')(x)
        # The output of this unit becomes the identity shortcut of the next one
        shortcut = x
    return x
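As a quick sanity check (a minimal sketch, assuming the definitions above have already been run; check_in and check_out are hypothetical names), a single stage built with conv_block halves the spatial resolution and expands the channels to filters*4:

check_in = Input(shape=(56, 56, 256))
check_out = conv_block(check_in, 128, rep=4)        # stage 3 of ResNet-50
print(Model(check_in, check_out).output_shape)      # expected: (None, 28, 28, 512)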
ResNet
def ResNet50(num_classes=10):
    inputs = Input(shape=(224, 224, 3))
    # Stem: 7x7/2 convolution followed by 3x3/2 max pooling
    x = _conv_bn_relu(inputs, 64, (7, 7), strides=2, padding='same')
    x = MaxPool2D((3, 3), strides=2, padding='same')(x)
    # Four stages with 3, 4, 6, 3 bottleneck units
    x = conv_block(x, 64, 3, True)
    x = conv_block(x, 128, 4)
    x = conv_block(x, 256, 6)
    x = conv_block(x, 512, 3)
    x = GlobalAvgPool2D()(x)
    outputs = Dense(num_classes, activation='softmax')(x)
    model = Model(inputs, outputs)
    return model
Model Summary
resnet = ResNet50()
resnet.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['acc'])
resnet.summary()
'''
Model: "model"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) [(None, 224, 224, 3) 0
__________________________________________________________________________________________________
conv2d (Conv2D) (None, 112, 112, 64) 9472 input_1[0][0]
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, 112, 112, 64) 256 conv2d[0][0]
__________________________________________________________________________________________________
activation (Activation) (None, 112, 112, 64) 0 batch_normalization[0][0]
__________________________________________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 56, 56, 64) 0 activation[0][0]
__________________________________________________________________________________________________
conv2d_2 (Conv2D) (None, 56, 56, 64) 4160 max_pooling2d[0][0]
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 56, 56, 64) 256 conv2d_2[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, 56, 56, 64) 0 batch_normalization_2[0][0]
__________________________________________________________________________________________________
conv2d_3 (Conv2D) (None, 56, 56, 64) 36928 activation_1[0][0]
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 56, 56, 64) 256 conv2d_3[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, 56, 56, 64) 0 batch_normalization_3[0][0]
__________________________________________________________________________________________________
conv2d_4 (Conv2D) (None, 56, 56, 256) 16640 activation_2[0][0]
__________________________________________________________________________________________________
conv2d_1 (Conv2D) (None, 56, 56, 256) 16640 max_pooling2d[0][0]
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 56, 56, 256) 1024 conv2d_4[0][0]
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 56, 56, 256) 1024 conv2d_1[0][0]
__________________________________________________________________________________________________
add (Add) (None, 56, 56, 256) 0 batch_normalization_4[0][0]
batch_normalization_1[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, 56, 56, 256) 0 add[0][0]
__________________________________________________________________________________________________
conv2d_5 (Conv2D) (None, 56, 56, 64) 16448 activation_3[0][0]
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 56, 56, 64) 256 conv2d_5[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, 56, 56, 64) 0 batch_normalization_5[0][0]
__________________________________________________________________________________________________
conv2d_6 (Conv2D) (None, 56, 56, 64) 36928 activation_4[0][0]
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 56, 56, 64) 256 conv2d_6[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, 56, 56, 64) 0 batch_normalization_6[0][0]
__________________________________________________________________________________________________
conv2d_7 (Conv2D) (None, 56, 56, 256) 16640 activation_5[0][0]
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 56, 56, 256) 1024 conv2d_7[0][0]
__________________________________________________________________________________________________
add_1 (Add) (None, 56, 56, 256) 0 batch_normalization_7[0][0]
activation_3[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, 56, 56, 256) 0 add_1[0][0]
__________________________________________________________________________________________________
conv2d_8 (Conv2D) (None, 56, 56, 64) 16448 activation_6[0][0]
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 56, 56, 64) 256 conv2d_8[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, 56, 56, 64) 0 batch_normalization_8[0][0]
__________________________________________________________________________________________________
conv2d_9 (Conv2D) (None, 56, 56, 64) 36928 activation_7[0][0]
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 56, 56, 64) 256 conv2d_9[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, 56, 56, 64) 0 batch_normalization_9[0][0]
__________________________________________________________________________________________________
conv2d_10 (Conv2D) (None, 56, 56, 256) 16640 activation_8[0][0]
__________________________________________________________________________________________________
batch_normalization_10 (BatchNo (None, 56, 56, 256) 1024 conv2d_10[0][0]
__________________________________________________________________________________________________
add_2 (Add) (None, 56, 56, 256) 0 batch_normalization_10[0][0]
activation_6[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, 56, 56, 256) 0 add_2[0][0]
__________________________________________________________________________________________________
conv2d_12 (Conv2D) (None, 28, 28, 128) 32896 activation_9[0][0]
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, 28, 28, 128) 512 conv2d_12[0][0]
__________________________________________________________________________________________________
activation_10 (Activation) (None, 28, 28, 128) 0 batch_normalization_12[0][0]
__________________________________________________________________________________________________
conv2d_13 (Conv2D) (None, 28, 28, 128) 147584 activation_10[0][0]
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, 28, 28, 128) 512 conv2d_13[0][0]
__________________________________________________________________________________________________
activation_11 (Activation) (None, 28, 28, 128) 0 batch_normalization_13[0][0]
__________________________________________________________________________________________________
conv2d_14 (Conv2D) (None, 28, 28, 512) 66048 activation_11[0][0]
__________________________________________________________________________________________________
conv2d_11 (Conv2D) (None, 28, 28, 512) 131584 activation_9[0][0]
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, 28, 28, 512) 2048 conv2d_14[0][0]
__________________________________________________________________________________________________
batch_normalization_11 (BatchNo (None, 28, 28, 512) 2048 conv2d_11[0][0]
__________________________________________________________________________________________________
add_3 (Add) (None, 28, 28, 512) 0 batch_normalization_14[0][0]
batch_normalization_11[0][0]
__________________________________________________________________________________________________
activation_12 (Activation) (None, 28, 28, 512) 0 add_3[0][0]
__________________________________________________________________________________________________
conv2d_15 (Conv2D) (None, 28, 28, 128) 65664 activation_12[0][0]
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, 28, 28, 128) 512 conv2d_15[0][0]
__________________________________________________________________________________________________
activation_13 (Activation) (None, 28, 28, 128) 0 batch_normalization_15[0][0]
__________________________________________________________________________________________________
conv2d_16 (Conv2D) (None, 28, 28, 128) 147584 activation_13[0][0]
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, 28, 28, 128) 512 conv2d_16[0][0]
__________________________________________________________________________________________________
activation_14 (Activation) (None, 28, 28, 128) 0 batch_normalization_16[0][0]
__________________________________________________________________________________________________
conv2d_17 (Conv2D) (None, 28, 28, 512) 66048 activation_14[0][0]
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, 28, 28, 512) 2048 conv2d_17[0][0]
__________________________________________________________________________________________________
add_4 (Add) (None, 28, 28, 512) 0 batch_normalization_17[0][0]
activation_12[0][0]
__________________________________________________________________________________________________
activation_15 (Activation) (None, 28, 28, 512) 0 add_4[0][0]
__________________________________________________________________________________________________
conv2d_18 (Conv2D) (None, 28, 28, 128) 65664 activation_15[0][0]
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, 28, 28, 128) 512 conv2d_18[0][0]
__________________________________________________________________________________________________
activation_16 (Activation) (None, 28, 28, 128) 0 batch_normalization_18[0][0]
__________________________________________________________________________________________________
conv2d_19 (Conv2D) (None, 28, 28, 128) 147584 activation_16[0][0]
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, 28, 28, 128) 512 conv2d_19[0][0]
__________________________________________________________________________________________________
activation_17 (Activation) (None, 28, 28, 128) 0 batch_normalization_19[0][0]
__________________________________________________________________________________________________
conv2d_20 (Conv2D) (None, 28, 28, 512) 66048 activation_17[0][0]
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, 28, 28, 512) 2048 conv2d_20[0][0]
__________________________________________________________________________________________________
add_5 (Add) (None, 28, 28, 512) 0 batch_normalization_20[0][0]
activation_15[0][0]
__________________________________________________________________________________________________
activation_18 (Activation) (None, 28, 28, 512) 0 add_5[0][0]
__________________________________________________________________________________________________
conv2d_21 (Conv2D) (None, 28, 28, 128) 65664 activation_18[0][0]
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, 28, 28, 128) 512 conv2d_21[0][0]
__________________________________________________________________________________________________
activation_19 (Activation) (None, 28, 28, 128) 0 batch_normalization_21[0][0]
__________________________________________________________________________________________________
conv2d_22 (Conv2D) (None, 28, 28, 128) 147584 activation_19[0][0]
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, 28, 28, 128) 512 conv2d_22[0][0]
__________________________________________________________________________________________________
activation_20 (Activation) (None, 28, 28, 128) 0 batch_normalization_22[0][0]
__________________________________________________________________________________________________
conv2d_23 (Conv2D) (None, 28, 28, 512) 66048 activation_20[0][0]
__________________________________________________________________________________________________
batch_normalization_23 (BatchNo (None, 28, 28, 512) 2048 conv2d_23[0][0]
__________________________________________________________________________________________________
add_6 (Add) (None, 28, 28, 512) 0 batch_normalization_23[0][0]
activation_18[0][0]
__________________________________________________________________________________________________
activation_21 (Activation) (None, 28, 28, 512) 0 add_6[0][0]
__________________________________________________________________________________________________
conv2d_25 (Conv2D) (None, 14, 14, 256) 131328 activation_21[0][0]
__________________________________________________________________________________________________
batch_normalization_25 (BatchNo (None, 14, 14, 256) 1024 conv2d_25[0][0]
__________________________________________________________________________________________________
activation_22 (Activation) (None, 14, 14, 256) 0 batch_normalization_25[0][0]
__________________________________________________________________________________________________
conv2d_26 (Conv2D) (None, 14, 14, 256) 590080 activation_22[0][0]
__________________________________________________________________________________________________
batch_normalization_26 (BatchNo (None, 14, 14, 256) 1024 conv2d_26[0][0]
__________________________________________________________________________________________________
activation_23 (Activation) (None, 14, 14, 256) 0 batch_normalization_26[0][0]
__________________________________________________________________________________________________
conv2d_27 (Conv2D) (None, 14, 14, 1024) 263168 activation_23[0][0]
__________________________________________________________________________________________________
conv2d_24 (Conv2D) (None, 14, 14, 1024) 525312 activation_21[0][0]
__________________________________________________________________________________________________
batch_normalization_27 (BatchNo (None, 14, 14, 1024) 4096 conv2d_27[0][0]
__________________________________________________________________________________________________
batch_normalization_24 (BatchNo (None, 14, 14, 1024) 4096 conv2d_24[0][0]
__________________________________________________________________________________________________
add_7 (Add) (None, 14, 14, 1024) 0 batch_normalization_27[0][0]
batch_normalization_24[0][0]
__________________________________________________________________________________________________
activation_24 (Activation) (None, 14, 14, 1024) 0 add_7[0][0]
__________________________________________________________________________________________________
conv2d_28 (Conv2D) (None, 14, 14, 256) 262400 activation_24[0][0]
__________________________________________________________________________________________________
batch_normalization_28 (BatchNo (None, 14, 14, 256) 1024 conv2d_28[0][0]
__________________________________________________________________________________________________
activation_25 (Activation) (None, 14, 14, 256) 0 batch_normalization_28[0][0]
__________________________________________________________________________________________________
conv2d_29 (Conv2D) (None, 14, 14, 256) 590080 activation_25[0][0]
__________________________________________________________________________________________________
batch_normalization_29 (BatchNo (None, 14, 14, 256) 1024 conv2d_29[0][0]
__________________________________________________________________________________________________
activation_26 (Activation) (None, 14, 14, 256) 0 batch_normalization_29[0][0]
__________________________________________________________________________________________________
conv2d_30 (Conv2D) (None, 14, 14, 1024) 263168 activation_26[0][0]
__________________________________________________________________________________________________
batch_normalization_30 (BatchNo (None, 14, 14, 1024) 4096 conv2d_30[0][0]
__________________________________________________________________________________________________
add_8 (Add) (None, 14, 14, 1024) 0 batch_normalization_30[0][0]
activation_24[0][0]
__________________________________________________________________________________________________
activation_27 (Activation) (None, 14, 14, 1024) 0 add_8[0][0]
__________________________________________________________________________________________________
conv2d_31 (Conv2D) (None, 14, 14, 256) 262400 activation_27[0][0]
__________________________________________________________________________________________________
batch_normalization_31 (BatchNo (None, 14, 14, 256) 1024 conv2d_31[0][0]
__________________________________________________________________________________________________
activation_28 (Activation) (None, 14, 14, 256) 0 batch_normalization_31[0][0]
__________________________________________________________________________________________________
conv2d_32 (Conv2D) (None, 14, 14, 256) 590080 activation_28[0][0]
__________________________________________________________________________________________________
batch_normalization_32 (BatchNo (None, 14, 14, 256) 1024 conv2d_32[0][0]
__________________________________________________________________________________________________
activation_29 (Activation) (None, 14, 14, 256) 0 batch_normalization_32[0][0]
__________________________________________________________________________________________________
conv2d_33 (Conv2D) (None, 14, 14, 1024) 263168 activation_29[0][0]
__________________________________________________________________________________________________
batch_normalization_33 (BatchNo (None, 14, 14, 1024) 4096 conv2d_33[0][0]
__________________________________________________________________________________________________
add_9 (Add) (None, 14, 14, 1024) 0 batch_normalization_33[0][0]
activation_27[0][0]
__________________________________________________________________________________________________
activation_30 (Activation) (None, 14, 14, 1024) 0 add_9[0][0]
__________________________________________________________________________________________________
conv2d_34 (Conv2D) (None, 14, 14, 256) 262400 activation_30[0][0]
__________________________________________________________________________________________________
batch_normalization_34 (BatchNo (None, 14, 14, 256) 1024 conv2d_34[0][0]
__________________________________________________________________________________________________
activation_31 (Activation) (None, 14, 14, 256) 0 batch_normalization_34[0][0]
__________________________________________________________________________________________________
conv2d_35 (Conv2D) (None, 14, 14, 256) 590080 activation_31[0][0]
__________________________________________________________________________________________________
batch_normalization_35 (BatchNo (None, 14, 14, 256) 1024 conv2d_35[0][0]
__________________________________________________________________________________________________
activation_32 (Activation) (None, 14, 14, 256) 0 batch_normalization_35[0][0]
__________________________________________________________________________________________________
conv2d_36 (Conv2D) (None, 14, 14, 1024) 263168 activation_32[0][0]
__________________________________________________________________________________________________
batch_normalization_36 (BatchNo (None, 14, 14, 1024) 4096 conv2d_36[0][0]
__________________________________________________________________________________________________
add_10 (Add) (None, 14, 14, 1024) 0 batch_normalization_36[0][0]
activation_30[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, 14, 14, 1024) 0 add_10[0][0]
__________________________________________________________________________________________________
conv2d_37 (Conv2D) (None, 14, 14, 256) 262400 activation_33[0][0]
__________________________________________________________________________________________________
batch_normalization_37 (BatchNo (None, 14, 14, 256) 1024 conv2d_37[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, 14, 14, 256) 0 batch_normalization_37[0][0]
__________________________________________________________________________________________________
conv2d_38 (Conv2D) (None, 14, 14, 256) 590080 activation_34[0][0]
__________________________________________________________________________________________________
batch_normalization_38 (BatchNo (None, 14, 14, 256) 1024 conv2d_38[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, 14, 14, 256) 0 batch_normalization_38[0][0]
__________________________________________________________________________________________________
conv2d_39 (Conv2D) (None, 14, 14, 1024) 263168 activation_35[0][0]
__________________________________________________________________________________________________
batch_normalization_39 (BatchNo (None, 14, 14, 1024) 4096 conv2d_39[0][0]
__________________________________________________________________________________________________
add_11 (Add) (None, 14, 14, 1024) 0 batch_normalization_39[0][0]
activation_33[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, 14, 14, 1024) 0 add_11[0][0]
__________________________________________________________________________________________________
conv2d_40 (Conv2D) (None, 14, 14, 256) 262400 activation_36[0][0]
__________________________________________________________________________________________________
batch_normalization_40 (BatchNo (None, 14, 14, 256) 1024 conv2d_40[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, 14, 14, 256) 0 batch_normalization_40[0][0]
__________________________________________________________________________________________________
conv2d_41 (Conv2D) (None, 14, 14, 256) 590080 activation_37[0][0]
__________________________________________________________________________________________________
batch_normalization_41 (BatchNo (None, 14, 14, 256) 1024 conv2d_41[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, 14, 14, 256) 0 batch_normalization_41[0][0]
__________________________________________________________________________________________________
conv2d_42 (Conv2D) (None, 14, 14, 1024) 263168 activation_38[0][0]
__________________________________________________________________________________________________
batch_normalization_42 (BatchNo (None, 14, 14, 1024) 4096 conv2d_42[0][0]
__________________________________________________________________________________________________
add_12 (Add) (None, 14, 14, 1024) 0 batch_normalization_42[0][0]
activation_36[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, 14, 14, 1024) 0 add_12[0][0]
__________________________________________________________________________________________________
conv2d_44 (Conv2D) (None, 7, 7, 512) 524800 activation_39[0][0]
__________________________________________________________________________________________________
batch_normalization_44 (BatchNo (None, 7, 7, 512) 2048 conv2d_44[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, 7, 7, 512) 0 batch_normalization_44[0][0]
__________________________________________________________________________________________________
conv2d_45 (Conv2D) (None, 7, 7, 512) 2359808 activation_40[0][0]
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, 7, 7, 512) 2048 conv2d_45[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, 7, 7, 512) 0 batch_normalization_45[0][0]
__________________________________________________________________________________________________
conv2d_46 (Conv2D) (None, 7, 7, 2048) 1050624 activation_41[0][0]
__________________________________________________________________________________________________
conv2d_43 (Conv2D) (None, 7, 7, 2048) 2099200 activation_39[0][0]
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, 7, 7, 2048) 8192 conv2d_46[0][0]
__________________________________________________________________________________________________
batch_normalization_43 (BatchNo (None, 7, 7, 2048) 8192 conv2d_43[0][0]
__________________________________________________________________________________________________
add_13 (Add) (None, 7, 7, 2048) 0 batch_normalization_46[0][0]
batch_normalization_43[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, 7, 7, 2048) 0 add_13[0][0]
__________________________________________________________________________________________________
conv2d_47 (Conv2D) (None, 7, 7, 512) 1049088 activation_42[0][0]
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, 7, 7, 512) 2048 conv2d_47[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, 7, 7, 512) 0 batch_normalization_47[0][0]
__________________________________________________________________________________________________
conv2d_48 (Conv2D) (None, 7, 7, 512) 2359808 activation_43[0][0]
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, 7, 7, 512) 2048 conv2d_48[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, 7, 7, 512) 0 batch_normalization_48[0][0]
__________________________________________________________________________________________________
conv2d_49 (Conv2D) (None, 7, 7, 2048) 1050624 activation_44[0][0]
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, 7, 7, 2048) 8192 conv2d_49[0][0]
__________________________________________________________________________________________________
add_14 (Add) (None, 7, 7, 2048) 0 batch_normalization_49[0][0]
activation_42[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, 7, 7, 2048) 0 add_14[0][0]
__________________________________________________________________________________________________
conv2d_50 (Conv2D) (None, 7, 7, 512) 1049088 activation_45[0][0]
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, 7, 7, 512) 2048 conv2d_50[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, 7, 7, 512) 0 batch_normalization_50[0][0]
__________________________________________________________________________________________________
conv2d_51 (Conv2D) (None, 7, 7, 512) 2359808 activation_46[0][0]
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, 7, 7, 512) 2048 conv2d_51[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, 7, 7, 512) 0 batch_normalization_51[0][0]
__________________________________________________________________________________________________
conv2d_52 (Conv2D) (None, 7, 7, 2048) 1050624 activation_47[0][0]
__________________________________________________________________________________________________
batch_normalization_52 (BatchNo (None, 7, 7, 2048) 8192 conv2d_52[0][0]
__________________________________________________________________________________________________
add_15 (Add) (None, 7, 7, 2048) 0 batch_normalization_52[0][0]
activation_45[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, 7, 7, 2048) 0 add_15[0][0]
__________________________________________________________________________________________________
global_average_pooling2d (Globa (None, 2048) 0 activation_48[0][0]
__________________________________________________________________________________________________
dense (Dense) (None, 10) 20490 global_average_pooling2d[0][0]
==================================================================================================
Total params: 23,608,202
Trainable params: 23,555,082
Non-trainable params: 53,120
__________________________________________________________________________________________________
'''
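To confirm the compiled graph runs end to end, a quick smoke test with random tensors can be used (a sketch only; x_dummy and y_dummy are placeholders, not a real dataset, and sparse_categorical_crossentropy expects integer class labels):

import numpy as np

x_dummy = np.random.rand(8, 224, 224, 3).astype('float32')   # placeholder images
y_dummy = np.random.randint(0, 10, size=(8,))                 # placeholder integer labels in [0, 10)
resnet.fit(x_dummy, y_dummy, batch_size=4, epochs=1)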