Commit 88f49451 authored by Gaetano Raffaele

ENH: Added TanH to Attention (Dino).

parent 1981c613
Showing 4 additions and 1 deletion
@@ -4,6 +4,7 @@ import keras
 from keras.callbacks import CSVLogger
 import keras.backend as K
 import csv
+import sys
 #from sklearn.ensemble import RandomForestClassifier
 from sklearn.metrics import accuracy_score,f1_score,confusion_matrix,precision_recall_fscore_support
@@ -14,6 +15,8 @@ ts_size = 16
 patch_size = 25
 n_bands = 4
+n_epochs = int(sys.argv[1])
+attTanh = int(sys.argv[2]) == 1
 # KOUMBIA
 #Load training inputs
@@ -92,7 +95,7 @@ resh = keras.layers.Reshape(input_shape=(n_timestamps*ts_size,),target_shape=(n_
 rnn_out = keras.layers.GRU(256,return_sequences=True,name='gru_base')(resh)
 #rnn_out = keras.layers.GRU(512,name='gru_base')(input_ts)
 rnn_out = keras.layers.Dropout(rate=0.5,name='gru_dropout')(rnn_out)
-rnn_out = BasicAttention(name='gru_attention', with_tanh=False)(rnn_out)
+rnn_out = BasicAttention(name='gru_attention', with_tanh=attTanh)(rnn_out)
 rnn_aux = keras.layers.Dense(n_classes,activation='softmax',name='rnn_dense_layer_'+str(n_classes))(rnn_out)
 #CNN branch
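The change makes the tanh squashing inside the attention layer switchable at run time: the second command-line argument (attTanh) is forwarded as the with_tanh flag of BasicAttention, so the script is now launched with two positional arguments, e.g. python train_script.py 100 1 to train for 100 epochs with tanh enabled (script name and values are illustrative). The BasicAttention class itself is not part of this diff; the code below is only a minimal sketch, assuming a standard single-vector attention over the GRU outputs, of where such a with_tanh option would typically act.

# Minimal sketch of a BasicAttention layer with an optional tanh step.
# The real BasicAttention implementation is not shown in this commit,
# so the structure and weight shapes here are assumptions.
import keras
import keras.backend as K

class BasicAttention(keras.layers.Layer):
    def __init__(self, with_tanh=False, **kwargs):
        super(BasicAttention, self).__init__(**kwargs)
        self.with_tanh = with_tanh  # toggled from the command line via attTanh

    def build(self, input_shape):
        # One attention weight per GRU feature; input is (batch, time, features).
        self.W = self.add_weight(name='att_weight',
                                 shape=(input_shape[-1], 1),
                                 initializer='glorot_uniform',
                                 trainable=True)
        super(BasicAttention, self).build(input_shape)

    def call(self, inputs):
        # Score each timestep, optionally squashing the scores with tanh.
        scores = K.squeeze(K.dot(inputs, self.W), axis=-1)   # (batch, time)
        if self.with_tanh:
            scores = K.tanh(scores)
        alphas = K.softmax(scores)                            # attention weights
        # Weighted sum over time -> (batch, features)
        return K.sum(inputs * K.expand_dims(alphas, axis=-1), axis=1)

    def compute_output_shape(self, input_shape):
        return (input_shape[0], input_shape[-1])

Without the tanh, the raw scores feed straight into the softmax; with it, they are first bounded to (-1, 1), which limits how peaked the resulting attention distribution over timesteps can become.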