resnet3dmulti.py
"""A vanilla 3D resnet implementation.
Based on JihongJu's 3D implementation keras-resnet3d (See https://github.com/JihongJu/keras-resnet3d). Additional modification was made by https://github.com/alamkanak to add support for multi input.
"""
import six
from math import ceil
from tensorflow import keras
from tensorflow.keras.models import Model
from tensorflow.keras import Input
from tensorflow.keras.layers import (
    Conv3D,
    AveragePooling3D,
    MaxPooling3D,
    Activation,
    Dense,
    Flatten,
    add,
    BatchNormalization,
    Dropout,
    Concatenate
)
from tensorflow.keras.regularizers import l2
from tensorflow.keras import backend as K


def _bn_relu(input):
    """Helper to build a BN -> relu block (by @raghakot)."""
    norm = BatchNormalization(axis=CHANNEL_AXIS)(input)
    return Activation("relu")(norm)


def _conv_bn_relu3D(**conv_params):
    """Helper to build a conv3d -> BN -> relu block."""
    filters = conv_params["filters"]
    kernel_size = conv_params["kernel_size"]
    strides = conv_params.setdefault("strides", (1, 1, 1))
    kernel_initializer = conv_params.setdefault(
        "kernel_initializer", "he_normal")
    padding = conv_params.setdefault("padding", "same")
    kernel_regularizer = conv_params.setdefault("kernel_regularizer",
                                                l2(1e-4))

    def f(input):
        conv = Conv3D(filters=filters, kernel_size=kernel_size,
                      strides=strides, kernel_initializer=kernel_initializer,
                      padding=padding,
                      kernel_regularizer=kernel_regularizer)(input)
        return _bn_relu(conv)
    return f


def _bn_relu_conv3d(**conv_params):
    """Helper to build a BN -> relu -> conv3d block."""
    filters = conv_params["filters"]
    kernel_size = conv_params["kernel_size"]
    strides = conv_params.setdefault("strides", (1, 1, 1))
    kernel_initializer = conv_params.setdefault("kernel_initializer",
                                                "he_normal")
    padding = conv_params.setdefault("padding", "same")
    kernel_regularizer = conv_params.setdefault("kernel_regularizer",
                                                l2(1e-4))

    def f(input):
        activation = _bn_relu(input)
        return Conv3D(filters=filters, kernel_size=kernel_size,
                      strides=strides, kernel_initializer=kernel_initializer,
                      padding=padding,
                      kernel_regularizer=kernel_regularizer)(activation)
    return f


def _shortcut3d(input, residual):
    """3D shortcut that matches the input to the residual shape and merges them with a sum."""
    stride_dim1 = ceil(input.shape[DIM1_AXIS] / residual.shape[DIM1_AXIS])
    stride_dim2 = ceil(input.shape[DIM2_AXIS] / residual.shape[DIM2_AXIS])
    stride_dim3 = ceil(input.shape[DIM3_AXIS] / residual.shape[DIM3_AXIS])
    equal_channels = residual.shape[CHANNEL_AXIS] == input.shape[CHANNEL_AXIS]

    shortcut = input
    if stride_dim1 > 1 or stride_dim2 > 1 or stride_dim3 > 1 or not equal_channels:
        shortcut = Conv3D(
            filters=residual.shape[CHANNEL_AXIS],
            kernel_size=(1, 1, 1),
            strides=(stride_dim1, stride_dim2, stride_dim3),
            kernel_initializer="he_normal", padding="valid",
            kernel_regularizer=l2(1e-4)
        )(input)
    return add([shortcut, residual])
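

# Worked example for the projection shortcut above (illustrative only, with
# assumed channels_last shapes): an input of shape (batch, 16, 16, 16, 64) and
# a residual of shape (batch, 8, 8, 8, 128) give stride factors of
# ceil(16 / 8) == 2 on each spatial axis and an unequal channel count, so the
# identity path is replaced by a strided 1x1x1 Conv3D with 128 filters and the
# two tensors can then be summed element-wise.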


def _residual_block3d(block_function, filters, kernel_regularizer, repetitions,
                      is_first_layer=False):
    def f(input):
        for i in range(repetitions):
            strides = (1, 1, 1)
            if i == 0 and not is_first_layer:
                # Downsample at the start of every group except the first,
                # which is already downsampled by the stem max-pooling.
                strides = (2, 2, 2)
            input = block_function(filters=filters, strides=strides,
                                   kernel_regularizer=kernel_regularizer,
                                   is_first_block_of_first_layer=(
                                       is_first_layer and i == 0)
                                   )(input)
        return input
    return f


def basic_block(filters, strides=(1, 1, 1), kernel_regularizer=l2(1e-4),
                is_first_block_of_first_layer=False):
    """Basic 3 x 3 x 3 convolution block. Extended from raghakot's 2D impl."""
    def f(input):
        if is_first_block_of_first_layer:
            # don't repeat bn->relu since we just did bn->relu->maxpool
            conv1 = Conv3D(filters=filters, kernel_size=(3, 3, 3),
                           strides=strides, padding="same",
                           kernel_initializer="he_normal",
                           kernel_regularizer=kernel_regularizer
                           )(input)
        else:
            conv1 = _bn_relu_conv3d(filters=filters,
                                    kernel_size=(3, 3, 3),
                                    strides=strides,
                                    kernel_regularizer=kernel_regularizer
                                    )(input)

        residual = _bn_relu_conv3d(filters=filters, kernel_size=(3, 3, 3),
                                   kernel_regularizer=kernel_regularizer
                                   )(conv1)
        return _shortcut3d(input, residual)
    return f


def bottleneck(filters, strides=(1, 1, 1), kernel_regularizer=l2(1e-4),
               is_first_block_of_first_layer=False):
    """Bottleneck 1x1 -> 3x3 -> 1x1 convolution block. Extended from raghakot's 2D impl."""
    def f(input):
        if is_first_block_of_first_layer:
            # don't repeat bn->relu since we just did bn->relu->maxpool
            conv_1_1 = Conv3D(filters=filters, kernel_size=(1, 1, 1),
                              strides=strides, padding="same",
                              kernel_initializer="he_normal",
                              kernel_regularizer=kernel_regularizer
                              )(input)
        else:
            conv_1_1 = _bn_relu_conv3d(filters=filters, kernel_size=(1, 1, 1),
                                       strides=strides,
                                       kernel_regularizer=kernel_regularizer
                                       )(input)

        conv_3_3 = _bn_relu_conv3d(filters=filters, kernel_size=(3, 3, 3),
                                   kernel_regularizer=kernel_regularizer
                                   )(conv_1_1)
        # expand back to 4x the base filter count, as in standard ResNet bottlenecks
        residual = _bn_relu_conv3d(filters=filters * 4, kernel_size=(1, 1, 1),
                                   kernel_regularizer=kernel_regularizer
                                   )(conv_3_3)
        return _shortcut3d(input, residual)
    return f


def _handle_data_format():
    global DIM1_AXIS
    global DIM2_AXIS
    global DIM3_AXIS
    global CHANNEL_AXIS
    if K.image_data_format() == 'channels_last':
        DIM1_AXIS = 1
        DIM2_AXIS = 2
        DIM3_AXIS = 3
        CHANNEL_AXIS = 4
    else:
        CHANNEL_AXIS = 1
        DIM1_AXIS = 2
        DIM2_AXIS = 3
        DIM3_AXIS = 4


def _get_block(identifier):
    if isinstance(identifier, six.string_types):
        res = globals().get(identifier)
        if not res:
            raise ValueError('Invalid {}'.format(identifier))
        return res
    return identifier
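

# Illustrative usage of _get_block (a sketch): it accepts either a block name
# or the block function itself, so Resnet3DBuilder.build can take either form
# for its block_fn argument, e.g.
#   _get_block('basic_block') is basic_block   -> True
#   _get_block(bottleneck) is bottleneck       -> True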


class Resnet3DBuilder(object):
    """ResNet3D."""

    @staticmethod
    def build(input_shape, num_outputs, block_fn, repetitions, reg_factor,
              dropout, bias_initializer):
        """Instantiate a vanilla ResNet3D keras model.

        # Arguments
            input_shape: Tuple of input shape in the format
                (conv_dim1, conv_dim2, conv_dim3, channels) for 'channels_last'
                or (channels, conv_dim1, conv_dim2, conv_dim3) for 'channels_first'.
            num_outputs: The number of outputs at the final softmax/sigmoid layer.
            block_fn: Unit block to use {'basic_block', 'bottleneck'}.
            repetitions: Repetitions of unit blocks.
            reg_factor: L2 regularization factor applied to the convolution kernels.
            dropout: Use 0 if no dropout is desired, otherwise the dropout rate.
            bias_initializer: Initializer for the bias of the final dense layer.
        # Returns
            model: a 3D ResNet model that takes a 5D tensor (volumetric images
            in batch) plus age and gender scalars as inputs and returns a 1D
            vector (prediction) as output.
        """
        _handle_data_format()
        if len(input_shape) != 4:
            raise ValueError("Input shape should be a tuple "
                             "(conv_dim1, conv_dim2, conv_dim3, channels) "
                             "for tensorflow as backend or "
                             "(channels, conv_dim1, conv_dim2, conv_dim3) "
                             "for theano as backend")

        block_fn = _get_block(block_fn)
        input = Input(shape=input_shape, name='img_input')
        age_input = Input(shape=(1,), name='age_input')
        gender_input = Input(shape=(1,), name='gender_input')

        # first conv
        conv1 = _conv_bn_relu3D(filters=64, kernel_size=(7, 7, 7),
                                strides=(2, 2, 2),
                                kernel_regularizer=l2(reg_factor)
                                )(input)
        pool1 = MaxPooling3D(pool_size=(3, 3, 3), strides=(2, 2, 2),
                             padding="same")(conv1)

        # repeat blocks
        block = pool1
        filters = 64
        for i, r in enumerate(repetitions):
            block = _residual_block3d(block_fn, filters=filters,
                                      kernel_regularizer=l2(reg_factor),
                                      repetitions=r, is_first_layer=(i == 0)
                                      )(block)
            filters *= 2

        # last activation
        block_output = _bn_relu(block)

        # average pool and classification
        pool2 = AveragePooling3D(pool_size=(block.shape[DIM1_AXIS],
                                            block.shape[DIM2_AXIS],
                                            block.shape[DIM3_AXIS]),
                                 strides=(1, 1, 1))(block_output)
        flatten1 = Flatten()(pool2)
        # concatenate the image features with the scalar age and gender inputs
        flatten1 = Concatenate()([flatten1, age_input, gender_input])
        if dropout > 0:
            flatten1 = Dropout(dropout)(flatten1)
        if num_outputs > 1:
            dense = Dense(units=num_outputs,
                          kernel_initializer="he_normal",
                          activation="softmax",
                          kernel_regularizer=l2(reg_factor),
                          bias_initializer=bias_initializer)(flatten1)
        else:
            dense = Dense(units=num_outputs,
                          kernel_initializer="he_normal",
                          activation="sigmoid",
                          kernel_regularizer=l2(reg_factor),
                          bias_initializer=bias_initializer)(flatten1)

        model = Model(inputs=[input, age_input, gender_input], outputs=dense)
        return model

    @staticmethod
    def build_resnet_18(input_shape, num_outputs, reg_factor=1e-4, dropout=0,
                        bias_initializer='zeros'):
        """Build resnet 18."""
        return Resnet3DBuilder.build(input_shape, num_outputs, basic_block,
                                     [2, 2, 2, 2], reg_factor=reg_factor,
                                     dropout=dropout,
                                     bias_initializer=bias_initializer)

    @staticmethod
    def build_resnet_34(input_shape, num_outputs, reg_factor=1e-4, dropout=0,
                        bias_initializer='zeros'):
        """Build resnet 34."""
        return Resnet3DBuilder.build(input_shape, num_outputs, basic_block,
                                     [3, 4, 6, 3], reg_factor=reg_factor,
                                     dropout=dropout,
                                     bias_initializer=bias_initializer)

    @staticmethod
    def build_resnet_50(input_shape, num_outputs, reg_factor=1e-4, dropout=0,
                        bias_initializer='zeros'):
        """Build resnet 50."""
        return Resnet3DBuilder.build(input_shape, num_outputs, bottleneck,
                                     [3, 4, 6, 3], reg_factor=reg_factor,
                                     dropout=dropout,
                                     bias_initializer=bias_initializer)

    @staticmethod
    def build_resnet_101(input_shape, num_outputs, reg_factor=1e-4, dropout=0,
                         bias_initializer='zeros'):
        """Build resnet 101."""
        return Resnet3DBuilder.build(input_shape, num_outputs, bottleneck,
                                     [3, 4, 23, 3], reg_factor=reg_factor,
                                     dropout=dropout,
                                     bias_initializer=bias_initializer)

    @staticmethod
    def build_resnet_152(input_shape, num_outputs, reg_factor=1e-4, dropout=0,
                         bias_initializer='zeros'):
        """Build resnet 152."""
        return Resnet3DBuilder.build(input_shape, num_outputs, bottleneck,
                                     [3, 8, 36, 3], reg_factor=reg_factor,
                                     dropout=dropout,
                                     bias_initializer=bias_initializer)
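

# Example usage (a sketch, not shipped with the original module): build the
# multi-input ResNet3D-18, compile it for binary classification, and run a
# forward pass on random volumes plus age/gender scalars. The 64x64x64x1 input
# size, dropout rate, and optimizer/loss choices below are assumptions chosen
# only for illustration.
if __name__ == "__main__":
    import numpy as np

    model = Resnet3DBuilder.build_resnet_18(input_shape=(64, 64, 64, 1),
                                            num_outputs=1,
                                            reg_factor=1e-4,
                                            dropout=0.2)
    model.compile(optimizer="adam",
                  loss="binary_crossentropy",
                  metrics=["accuracy"])
    model.summary()

    # Dummy batch: 2 volumes plus matching age and gender scalars, fed by the
    # input names defined in Resnet3DBuilder.build.
    volumes = np.random.rand(2, 64, 64, 64, 1).astype("float32")
    ages = np.random.rand(2, 1).astype("float32")
    genders = np.random.randint(0, 2, size=(2, 1)).astype("float32")
    preds = model.predict({"img_input": volumes,
                           "age_input": ages,
                           "gender_input": genders})
    print(preds.shape)  # (2, 1) sigmoid predictions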