import numpy as np
import pandas as pd
import os
import random
import matplotlib.pyplot as plt
# 'seaborn-white' was renamed to 'seaborn-v0_8-white' in matplotlib 3.6 and
# later removed; try the legacy name first so both old and new installs work.
try:
    plt.style.use('seaborn-white')
except OSError:
    plt.style.use('seaborn-v0_8-white')
import cv2
from sklearn.model_selection import StratifiedKFold
from keras.models import Model
from keras.layers import Input, Activation
from keras.layers.core import Lambda
from keras.layers.convolutional import Conv2D, Conv2DTranspose
from keras.layers.pooling import MaxPooling2D
from keras.layers.merge import concatenate
from keras.callbacks import Callback, LearningRateScheduler, EarlyStopping, ModelCheckpoint, ReduceLROnPlateau
from keras import backend as K
from keras import optimizers
from keras import layers
from keras.losses import binary_crossentropy
import tensorflow as tf
from keras.preprocessing.image import array_to_img, img_to_array, load_img
def convBn2d(input_tensor, filters, stage, block, kernel_size=(3, 3)):
    """Pre-activation conv unit: BatchNorm -> ReLU -> same-padding Conv2D.

    Layer names are derived from `stage` and `block` so that weights can be
    matched by name when saving/loading models.

    # Arguments
        input_tensor: 4D input tensor (channels-last).
        filters: number of output filters for the conv layer.
        stage: integer stage label, used for generating layer names.
        block: block label ('a', 'b', ...), used for generating layer names.
        kernel_size: conv kernel size (default (3, 3)).

    # Returns
        Output tensor of the BN -> ReLU -> Conv2D sequence.
    """
    bn_name = 'bn' + str(stage) + block + '_branch'
    conv_name = 'conv2d' + str(stage) + block + '_branch'

    out = layers.BatchNormalization(axis=3, name=bn_name)(input_tensor)
    out = layers.Activation('relu')(out)
    out = layers.Conv2D(
        filters,
        kernel_size,
        padding='same',
        kernel_initializer='he_normal',
        name=conv_name,
    )(out)
    return out
def identity_block(input_tensor, filters, stage, block, kernel_size=(3, 3)):
    """Pre-activation residual block with no conv layer at the shortcut.

    Applies two BN -> ReLU -> Conv2D stages to the input and adds the result
    back onto `input_tensor`.  Because the shortcut is the identity, `filters`
    must equal the channel count of `input_tensor` for the final add to be
    valid.

    # Arguments
        input_tensor: input tensor
        filters: number of filters used by both conv layers at the main path
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
        kernel_size: kernel size of both conv layers (default (3, 3))

    # Returns
        Output tensor for the block.
    """
    conv_prefix = 'res' + str(stage) + block + '_branch'
    bn_prefix = 'bn' + str(stage) + block + '_branch'

    branch = layers.BatchNormalization(axis=3, name=bn_prefix + '2a')(input_tensor)
    branch = layers.Activation('relu')(branch)
    branch = layers.Conv2D(filters, kernel_size,
                           padding='same',
                           kernel_initializer='he_normal',
                           name=conv_prefix + '2a')(branch)

    branch = layers.BatchNormalization(axis=3, name=bn_prefix + '2b')(branch)
    branch = layers.Activation('relu')(branch)
    branch = layers.Conv2D(filters, kernel_size,
                           padding='same',
                           kernel_initializer='he_normal',
                           name=conv_prefix + '2b')(branch)

    # Identity shortcut: add the untouched input back onto the main path.
    return layers.add([branch, input_tensor])
def conv_block(input_tensor,
               filters,
               stage,
               block,
               strides=(2, 2),
               kernel_size=(3, 3)):
    """Pre-activation residual block with a conv layer at the shortcut.

    Both the main path's first conv and the shortcut conv use a (2, 2)
    kernel with `padding='valid'` and the given `strides`, so the block
    downsamples spatially (by a factor of 2 for the default strides) while
    keeping the two paths shape-compatible for the final add.

    # Arguments
        input_tensor: input tensor
        filters: number of filters used by every conv layer in the block
        stage: integer, current stage label, used for generating layer names
        block: 'a','b'..., current block label, used for generating layer names
        strides: strides for the downsampling convs on both paths
        kernel_size: kernel size of the second (same-padding) conv

    # Returns
        Output tensor for the block.
    """
    conv_prefix = 'res' + str(stage) + block + '_branch'
    bn_prefix = 'bn' + str(stage) + block + '_branch'

    # Main path: strided valid conv (downsample), then same-padding conv.
    main = layers.BatchNormalization(axis=3, name=bn_prefix + '2a')(input_tensor)
    main = layers.Activation('relu')(main)
    main = layers.Conv2D(filters, (2, 2), strides=strides,
                         padding='valid',
                         kernel_initializer='he_normal',
                         name=conv_prefix + '2a')(main)

    main = layers.BatchNormalization(axis=3, name=bn_prefix + '2b')(main)
    main = layers.Activation('relu')(main)
    main = layers.Conv2D(filters, kernel_size, padding='same',
                         kernel_initializer='he_normal',
                         name=conv_prefix + '2b')(main)

    # Shortcut path: matching strided valid conv so shapes line up.
    shortcut = layers.BatchNormalization(axis=3, name=bn_prefix + '1')(input_tensor)
    shortcut = layers.Activation('relu')(shortcut)
    shortcut = layers.Conv2D(filters, (2, 2), strides=strides,
                             padding='valid',
                             kernel_initializer='he_normal',
                             name=conv_prefix + '1')(shortcut)

    return layers.add([main, shortcut])
def cse_block(prevlayer, prefix, ratio=2):
    """Channel squeeze-and-excitation (cSE) gate.

    Global-average-pools `prevlayer` over its spatial axes, squeezes the
    channel dimension by `ratio` through a ReLU dense layer, restores it
    with a sigmoid dense layer, and rescales `prevlayer` channel-wise by
    the resulting attention weights.

    # Arguments
        prevlayer: 4D input tensor (batch, height, width, channels);
            the static channel count must be known.
        prefix: string used to build unique layer names.
        ratio: channel reduction factor for the bottleneck dense layer.
            Defaults to 2, matching the previously hard-coded value.

    # Returns
        `prevlayer` rescaled per channel by the learned attention weights.
    """
    channels = K.int_shape(prevlayer)[3]
    # Squeeze: global average pool over height and width.
    mean = layers.Lambda(lambda xin: K.mean(xin, axis=[1, 2]))(prevlayer)
    # Excite: bottleneck MLP producing per-channel weights in (0, 1).
    lin1 = layers.Dense(channels // ratio, name=prefix + 'cse_lin1', activation='relu')(mean)
    lin2 = layers.Dense(channels, name=prefix + 'cse_lin2', activation='sigmoid')(lin1)
    x = layers.Multiply()([prevlayer, lin2])
    return x
def sse_block(prevlayer, prefix):
    """Spatial squeeze-and-excitation (sSE) gate.

    A 1x1 sigmoid convolution collapses the channel dimension into a
    single-channel spatial attention map, which then rescales `prevlayer`
    pixel-wise via broadcasting.

    # Arguments
        prevlayer: 4D input tensor (channels-last).
        prefix: string used to build unique layer names.

    # Returns
        `prevlayer` rescaled by the learned spatial attention map.
    """
    attention = layers.Conv2D(
        1,
        (1, 1),
        padding="same",
        kernel_initializer="he_normal",
        activation='sigmoid',
        strides=(1, 1),
        name=prefix + "_conv",
    )(prevlayer)
    return layers.Multiply(name=prefix + "_mul")([prevlayer, attention])
def csse_block(x, prefix):
    """Concurrent spatial and channel 'Squeeze & Excitation' (scSE) block.

    Combines the channel-gated (`cse_block`) and spatially-gated
    (`sse_block`) versions of `x` by element-wise addition.

    Implementation of Concurrent Spatial and Channel 'Squeeze & Excitation'
    in Fully Convolutional Networks, https://arxiv.org/abs/1803.02579

    # Arguments
        x: 4D input tensor (channels-last).
        prefix: string used to build unique layer names.

    # Returns
        Sum of the channel-excited and spatially-excited tensors.
    """
    channel_gated = cse_block(x, prefix)
    spatial_gated = sse_block(x, prefix)
    # NOTE: the layer name ends in "_mul" for historical reasons but the
    # combination is an Add; kept as-is so saved weights still match by name.
    return layers.Add(name=prefix + "_csse_mul")([channel_gated, spatial_gated])
def UResNet(input_shape, start_neurons,classes,hc=True,scse=False,block_num=5):
'''
input_layer is designed to (128,128,3)
'''
# 128 -> 128
input_layer=Input(input_shape)
conv1 = Conv2D(start_neurons * 1, (3, 3), activation=None, padding="same")(input_layer)
conv1 = identity_block(conv1, filters=start_neurons * 1, stage=1, block='a')
conv1 = identity_block(conv1, filters=start_neurons * 1, stage=1, block='b')
conv1 = identity_block(conv1, filters=start_neurons * 1, stage=1, block='c')
if(scse==True):
conv1 = csse_block(conv1, 'stage1')
# 128 -> 64
conv2 = conv_block(conv1, filters=start_neurons * 2, stage=2, block='a')
conv2 = identity_block(conv2, filters=start_neurons * 2, stage=2, block='b')
conv2 = identity_block(conv2, filters=start_neurons * 2, stage=2, block='c')
conv2 = identity_block(conv2, filters=start_neurons * 2, stage=2, block='d')
if(scse==True):
conv2 = csse_block(conv2, 'stage2')
# 64 -> 32
conv3 = conv_block(conv2, filters=start_neurons * 4, stage=3, block='a')
conv3 = identity_block(conv3, filters=start_neurons * 4, stage=3, block='b')
conv3 = identity_block(conv3, filters=start_neurons * 4, stage=3, block='c')
conv3 = identity_block(conv3, filters=start_neurons * 4, stage=3, block='d')
conv3 = identity_block(conv3, filters=start_neurons * 4, stage=3, block='e')
conv3 = identity_block(conv3, filters=start_neurons * 4, stage=3, block='f')
if(scse==True):
conv3 = csse_block(conv3,