Residual block 생성
def residual_block(X, f, filters, stride=1, ver='identity'):
    """Bottleneck residual block (ResNet50-style).

    Applies a 1x1 -> fxf -> 1x1 convolution stack with batch normalization,
    then adds the shortcut connection and applies a final ReLU.

    Args:
        X: Input 4-D tensor (batch, height, width, channels).
        f: Kernel size of the middle convolution (e.g. 3).
        filters: List/tuple of three ints [F1, F2, F3] — the filter counts
            of the three convolutions in the bottleneck.
        stride: Stride of the first convolution (and of the shortcut
            projection when ver == 'conv'). Defaults to 1 so identity
            blocks can omit it.
        ver: 'conv' for a projection (conv) block whose shortcut is
            convolved to match shape; anything else means identity block.

    Returns:
        Output tensor of the block, same rank as X.
    """
    # Unpack the three bottleneck filter counts.
    F1, F2, F3 = filters

    # Save the input for the shortcut (skip) connection.
    X_shortcut = X

    # First component: 1x1 conv, optionally downsampling via `stride`.
    X = Conv2D(filters=F1, kernel_size=(1, 1), strides=(stride, stride),
               padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3)(X)
    X = Activation('relu')(X)

    # Second component: fxf conv with 'same' padding to keep spatial size.
    X = Conv2D(filters=F2, kernel_size=(f, f), strides=(1, 1),
               padding='same', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3)(X)
    X = Activation('relu')(X)

    # Third component: 1x1 conv expanding back to F3 channels.
    # No ReLU here — it is applied after the shortcut addition.
    X = Conv2D(filters=F3, kernel_size=(1, 1), strides=(1, 1),
               padding='valid', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3)(X)

    # Projection shortcut: only a conv block transforms the shortcut so its
    # shape matches the main path; identity blocks pass it through unchanged.
    if ver == 'conv':
        X_shortcut = Conv2D(filters=F3, kernel_size=(1, 1),
                            strides=(stride, stride), padding='valid',
                            kernel_initializer=glorot_uniform(seed=0))(X_shortcut)
        X_shortcut = BatchNormalization(axis=3)(X_shortcut)

    # Add the shortcut to the main path, then apply the final activation.
    X = Add()([X, X_shortcut])
    X = Activation('relu')(X)
    return X
ResNet 모델 설계
def ResNet(input_shape=(64, 64, 3), classes=6):
    """Build a ResNet50-style classification model.

    Architecture:
        CONV2D -> BATCHNORM -> RELU -> MAXPOOL -> CONVBLOCK -> IDBLOCK*2
        -> CONVBLOCK -> IDBLOCK*3 -> CONVBLOCK -> IDBLOCK*5
        -> CONVBLOCK -> IDBLOCK*2 -> AVGPOOL -> TOPLAYER

    Args:
        input_shape: Shape of the input images (H, W, C).
        classes: Number of output classes for the softmax head.

    Returns:
        An uncompiled Keras Model.
    """
    X_input = Input(input_shape)

    # Stage 1: zero-pad, then a strided 7x7 stem conv + BN + ReLU + maxpool.
    X = ZeroPadding2D((3, 3))(X_input)
    X = Conv2D(64, (7, 7), strides=(2, 2),
               kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3)(X)
    X = Activation('relu')(X)
    X = MaxPooling2D((3, 3), strides=(2, 2))(X)

    # Stage 2: one conv (projection) block, then 2 identity blocks.
    # All residual_block args are passed explicitly so the calls match
    # the signature residual_block(X, f, filters, stride, ver).
    X = residual_block(X, 3, [64, 64, 256], 1, 'conv')
    for _ in range(2):
        X = residual_block(X, 3, [64, 64, 256], 1, 'identity')

    # Stage 3: conv block (downsample by 2), then 3 identity blocks.
    X = residual_block(X, 3, [128, 128, 512], 2, 'conv')
    for _ in range(3):
        X = residual_block(X, 3, [128, 128, 512], 1, 'identity')

    # Stage 4: conv block (downsample by 2), then 5 identity blocks.
    X = residual_block(X, 3, [256, 256, 1024], 2, 'conv')
    for _ in range(5):
        X = residual_block(X, 3, [256, 256, 1024], 1, 'identity')

    # Stage 5: conv block (downsample by 2), then 2 identity blocks.
    X = residual_block(X, 3, [512, 512, 2048], 2, 'conv')
    for _ in range(2):
        X = residual_block(X, 3, [512, 512, 2048], 1, 'identity')

    # Head: average-pool, flatten, and a softmax classifier.
    X = AveragePooling2D((2, 2), name="avg_pool")(X)
    X = Flatten()(X)
    X = Dense(classes, activation='softmax',
              kernel_initializer=glorot_uniform(seed=0))(X)

    model = Model(inputs=X_input, outputs=X, name='ResNet50')
    return model
모델 훈련 및 테스팅
# Build the model, compile it for multi-class classification,
# then train on the training split and evaluate on the held-out test split.
model = ResNet(input_shape=(64, 64, 3), classes=6)
model.compile(
    optimizer='adam',
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
model.fit(X_train, Y_train, batch_size=32, epochs=20)
prediction = model.evaluate(X_test, Y_test)  # [loss, accuracy]
'VISION' 카테고리의 다른 글
Coursera 딥러닝 과정 수강 - 배운 내용 정리 (0) | 2020.08.20 |
---|---|
PyTorch Basics (0) | 2020.08.02 |
Keras Basics (0) | 2020.07.27 |
TensorFlow Basics (0) | 2020.07.12 |
Gradient Descent Optimization - Mini batch, Momentum, RMS, Adam, Learning rate decay (0) | 2020.06.29 |