An error occurred while loading the file. Please try again.
-
Ienco Dino authored0bbad62b
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
# resnet model
# when tuning start with learning rate->mini_batch_size ->
# momentum-> #hidden_units -> # learning_rate_decay -> #layers
import tensorflow.keras as tfk
import numpy as np
import time
#import matplotlib
#from utils.utils import save_test_duration
#matplotlib.use('agg')
#import matplotlib.pyplot as plt
#from utils.utils import save_logs
#from utils.utils import calculate_metrics
class TwoBranchCNN1D:
    """Two-branch 1D CNN classifier.

    Each of the two inputs is passed through its own stack of Conv1D /
    BatchNorm / Dropout layers; the two branch outputs are concatenated
    and fed to a dense softmax head over ``nb_classes`` classes.
    """

    def getBranch(self, input_layer, n_filters, dropOut, suffix):
        """Build one convolutional branch of the network.

        Args:
            input_layer: Keras tensor the branch starts from.
            n_filters: base number of feature maps; doubled for the last
                two conv layers.
            dropOut: dropout rate applied after every batch-norm layer.
            suffix: string appended to each layer name so the two
                branches get unique, non-colliding layer names.

        Returns:
            The Keras tensor produced by the branch's final dropout layer.
        """
        # BUG FIX: original referenced the undefined name `n_feature_maps`;
        # the parameter is `n_filters`.
        conv_x = tfk.layers.Conv1D(filters=n_filters, kernel_size=5, padding='valid',
                                   name="conv1_" + suffix, activation="relu")(input_layer)
        conv_x = tfk.layers.BatchNormalization(name="bn1_" + suffix)(conv_x)
        conv_x = tfk.layers.Dropout(dropOut, name="dropOut1_" + suffix)(conv_x)

        conv_x = tfk.layers.Conv1D(filters=n_filters, kernel_size=3, padding='valid',
                                   name="conv2_" + suffix, activation="relu")(conv_x)
        conv_x = tfk.layers.BatchNormalization(name="bn2_" + suffix)(conv_x)
        conv_x = tfk.layers.Dropout(dropOut, name="dropOut2_" + suffix)(conv_x)

        # Feature-map count doubles for the deeper layers.
        conv_x = tfk.layers.Conv1D(filters=n_filters * 2, kernel_size=3, padding='valid',
                                   name="conv3_" + suffix, activation="relu")(conv_x)
        conv_x = tfk.layers.BatchNormalization(name="bn3_" + suffix)(conv_x)
        conv_x = tfk.layers.Dropout(dropOut, name="dropOut3_" + suffix)(conv_x)

        conv_x = tfk.layers.Conv1D(filters=n_filters * 2, kernel_size=1, padding='valid',
                                   name="conv4_" + suffix, activation="relu")(conv_x)
        conv_x = tfk.layers.BatchNormalization(name="bn4_" + suffix)(conv_x)
        conv_x = tfk.layers.Dropout(dropOut, name="dropOut4_" + suffix)(conv_x)
        return conv_x

    def __init__(self, output_directory, input_shape1, input_shape2, nb_classes,
                 verbose=False, build=True, load_weights=False):
        """Create the two-branch model.

        Args:
            output_directory: kept for interface compatibility (currently
                unused — the checkpoint code below is commented out).
            input_shape1: shape tuple for the first input branch.
            input_shape2: shape tuple for the second input branch.
            nb_classes: number of output classes.
            verbose, build, load_weights: kept for interface compatibility;
                not used by the visible code — TODO confirm against callers.
        """
        # self.output_directory = output_directory
        self.model = self.build_model(input_shape1, input_shape2, nb_classes)
        # self.callbacks is populated by build_model (ReduceLROnPlateau).
        # BUG FIX: the original set self.callbacks = None AFTER build_model,
        # clobbering the callbacks build_model had just installed, and then
        # did `return self.model` — __init__ must return None or Python
        # raises TypeError. The built model is available as self.model.

    def build_model(self, input_shape1, input_shape2, nb_classes):
        """Assemble and compile the two-branch Keras model.

        Also installs self.callbacks (a ReduceLROnPlateau on training loss).

        Returns:
            The compiled tfk.models.Model taking [input1, input2].
        """
        n_feature_maps = 128
        dropOut = 0.2

        input_layer1 = tfk.layers.Input(name="input1", shape=input_shape1)
        input_layer2 = tfk.layers.Input(name="input2", shape=input_shape2)

        features1 = self.getBranch(input_layer1, n_feature_maps, dropOut, "input1")
        features2 = self.getBranch(input_layer2, n_feature_maps, dropOut, "input2")

        # BUG FIX: `tfk.Concatenation` does not exist in the Keras API;
        # the layer is `tfk.layers.Concatenate`.
        # NOTE(review): concatenation is along the last (channel) axis, so
        # the two branches must produce matching time dimensions — i.e.
        # input_shape1 and input_shape2 must have equal sequence lengths.
        # TODO confirm against callers.
        features = tfk.layers.Concatenate(name="concat_layer")([features1, features2])

        dense_layer = tfk.layers.Dense(512, activation='relu')(features)
        dense_layer = tfk.layers.Dense(512, activation='relu')(dense_layer)
        output_layer = tfk.layers.Dense(nb_classes, activation='softmax')(dense_layer)

        model = tfk.models.Model(inputs=[input_layer1, input_layer2], outputs=output_layer)
        model.compile(loss='categorical_crossentropy',
                      optimizer=tfk.optimizers.Adam(),
                      metrics=['accuracy'])

        # BUG FIX: original used the undefined name `keras`; the module is
        # imported as `tfk`.
        reduce_lr = tfk.callbacks.ReduceLROnPlateau(monitor='loss', factor=0.5,
                                                    patience=50, min_lr=0.0001)
        # file_path = self.output_directory + 'best_model.hdf5'
        # model_checkpoint = tfk.callbacks.ModelCheckpoint(filepath=file_path, monitor='loss',
        #                                                  save_best_only=True)
        self.callbacks = [reduce_lr]  # , model_checkpoint]
        return model  # , encoder

    # Dead code kept from the original as a reference for a future
    # fit/predict implementation. NOTE(review): `x_train` below was a bug —
    # only x_train1/x_train2 exist; corrected to x_train1.
    '''
    def fit(self, x_train1, x_train2, y_train, batch_size, nb_epochs):
        mini_batch_size = int(min(x_train1.shape[0] / 10, batch_size))
        start_time = time.time()
        hist = self.model.fit([x_train1, x_train2], y_train,
                              batch_size=mini_batch_size, epochs=nb_epochs,
                              verbose=self.verbose, callbacks=self.callbacks)

    def predict(self, x_test1, x_test2):
        start_time = time.time()
        y_pred = self.model.predict([x_test1, x_test2])
        return y_pred
    '''