Commit e2f879c4 authored by Benedetti Paola's avatar Benedetti Paola
Browse files

moved

parent 7c7eecb2
No related merge requests found
Showing with 0 additions and 482 deletions
+0 -482
File moved
File moved
File moved
File moved
File moved
File moved
File moved
File moved
File moved
File moved
File moved
File moved
File moved
File moved
File moved
File moved
import numpy as np
import sys
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_recall_fscore_support
from sklearn.metrics import precision_score,accuracy_score,recall_score,f1_score

# Average per-class precision/recall/f-score over n_split train/test splits
# for each classical classifier and save the results as .npy files next to
# the stored predictions.
# Usage: python meqsuresC.py RFC,GBC,SVC 1
#        python meqsuresC.py RFC,GBC,SVC 2
p_split = 70   # train/test split percentage encoded in the result paths
n_split = 10   # number of splits whose metrics are averaged

arr_C = sys.argv[1].split(',')  # classifier names, e.g. RFC,GBC,SVC
norm = int(sys.argv[2])         # normalisation variant (selects dataset/N1 or N2)

for class_ in arr_C:
    # BUG FIX: the accumulators must be reset for every classifier; previously
    # they were initialised once, so each classifier's averages were polluted
    # by the previous classifier's (already divided) values.
    precision = np.zeros(3)
    recall = np.zeros(3)
    fscore = np.zeros(3)
    for i in range(n_split):
        var_totpred = './dataset/N%d/%s_truthpred_%d%s%d%s' % (norm, class_, p_split, 'p/totpred', i, '.npy')
        var_gt = './dataset/N%d/%s_truthpred_%d%s%d%s' % (norm, class_, p_split, 'p/gt', i, '.npy')
        C_pred = np.load(var_totpred)
        test_y = np.load(var_gt)
        var_prec, var_rec, var_fsc, _ = precision_recall_fscore_support(test_y, C_pred)
        # Sum P, R and FS values for each class.
        # BUG FIX: recall and fscore previously accumulated into `precision`
        # (np.add(precision, ...)), so the saved recall/fscore were wrong.
        precision = np.add(precision, np.array(var_prec))
        recall = np.add(recall, np.array(var_rec))
        fscore = np.add(fscore, np.array(var_fsc))
    # Mean values of P, R, FS over the splits.
    precision = np.divide(precision, n_split)
    recall = np.divide(recall, n_split)
    fscore = np.divide(fscore, n_split)
    # Scalar weighted measures; NOTE(review): these use only the last split's
    # predictions (test_y/C_pred from the final loop iteration), matching the
    # original behaviour — confirm this is intended rather than an average.
    acc_score = accuracy_score(test_y, C_pred)
    prec_score = precision_score(test_y, C_pred, average='weighted')
    rec_score = recall_score(test_y, C_pred, average='weighted')
    fsc_score = f1_score(test_y, C_pred, average='weighted')
    np.save('./dataset/N%d/%s_truthpred_%d%s' % (norm, class_, p_split, 'p/precision.npy'), precision)
    np.save('./dataset/N%d/%s_truthpred_%d%s' % (norm, class_, p_split, 'p/recall.npy'), recall)
    np.save('./dataset/N%d/%s_truthpred_%d%s' % (norm, class_, p_split, 'p/fscore.npy'), fscore)
    np.save('./dataset/N%d/%s_truthpred_%d%s' % (norm, class_, p_split, 'p/accuracy_score.npy'), acc_score)
    np.save('./dataset/N%d/%s_truthpred_%d%s' % (norm, class_, p_split, 'p/precision_score.npy'), prec_score)
    np.save('./dataset/N%d/%s_truthpred_%d%s' % (norm, class_, p_split, 'p/recall_score.npy'), rec_score)
    np.save('./dataset/N%d/%s_truthpred_%d%s' % (norm, class_, p_split, 'p/fscore_score.npy'), fsc_score)
import numpy as np
import sys
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_recall_fscore_support
from sklearn.metrics import precision_score,accuracy_score,recall_score,f1_score,confusion_matrix

# Average per-class precision/recall/f-score over n_split splits for each
# LSTM variant and save the results next to the stored predictions.
# Usage: python measuresLSTM.py LSTM,B_LSTM <batch> <units> <layers> <norm> <+|c>
timesteps = 22  # kept for reference; not used below
ninput = 13     # kept for reference; not used below
p_split = 70
n_split = 10

arrT = sys.argv[1].split(',')   # model variants, e.g. LSTM,B_LSTM
batchsz = int(sys.argv[2])
nunits = int(sys.argv[3])
nlayer = int(sys.argv[4])
norm = int(sys.argv[5])
opH = sys.argv[6]               # hidden-state handling: '+' or 'c' (selects the result directory)

for T_lstm in arrT:
    # BUG FIX: reset the accumulators per model variant; previously they were
    # initialised once and carried (already divided) values between variants.
    precision = np.zeros(3)
    recall = np.zeros(3)
    fscore = np.zeros(3)
    # BUG FIX: build the directory once and fail fast on an unknown opH;
    # previously an unknown opH left var_totpred/var_gt undefined (NameError).
    if opH == '+':
        suffix = 'b+/'
    elif opH == 'c':
        suffix = 'bc/'
    else:
        raise ValueError("opH must be '+' or 'c', got %r" % opH)
    g_path = './dataset/N%d/%s%dl_truthpred_%dp%du%d%s' % (norm, T_lstm, nlayer, p_split, nunits, batchsz, suffix)
    for i in range(n_split):
        LSTM_pred = np.load(g_path + 'totpred%d.npy' % i)
        test_y = np.load(g_path + 'gt%d.npy' % i)
        # sklearn convention: C[i][j] = samples with true class i predicted as j,
        # so rows are true classes and columns are predicted classes.
        C = confusion_matrix(test_y, LSTM_pred)
        TP = np.diag(C).astype('float32')            # true positives per class
        retrieved = C.sum(axis=0).astype('float32')  # predicted count per class (column sums)
        relevant = C.sum(axis=1).astype('float32')   # true count per class (row sums)
        # BUG FIX: PRECISION = TP / retrieved (column sums) and
        # RECALL = TP / relevant (row sums); the original used row sums for
        # precision and column sums for recall, i.e. the two were swapped.
        # The f-score is unaffected by the swap.
        prec_temp = TP / retrieved
        recall_temp = TP / relevant
        # FSCORE = 2 * (P*R / (P+R))
        fscore_temp = 2 * (prec_temp * recall_temp / (prec_temp + recall_temp))
        # Sum values per class.
        precision = precision + prec_temp
        recall = recall + recall_temp
        fscore = fscore + fscore_temp
    # Mean P, R and FS values per class over the splits.
    precision = np.divide(precision, n_split)
    recall = np.divide(recall, n_split)
    fscore = np.divide(fscore, n_split)
    # Scalar weighted measures; NOTE(review): computed on the last split only,
    # matching the original behaviour.
    acc_score = accuracy_score(test_y, LSTM_pred)
    prec_score = precision_score(test_y, LSTM_pred, average='weighted')
    rec_score = recall_score(test_y, LSTM_pred, average='weighted')
    fsc_score = f1_score(test_y, LSTM_pred, average='weighted')
    np.save(g_path + 'precision.npy', precision)
    np.save(g_path + 'recall.npy', recall)
    np.save(g_path + 'fscore.npy', fscore)
    np.save(g_path + 'accuracy_score.npy', acc_score)
    np.save(g_path + 'precision_score.npy', prec_score)
    np.save(g_path + 'recall_score.npy', rec_score)
    np.save(g_path + 'fscore_score.npy', fsc_score)
import numpy as np
import sys
from sklearn.metrics import accuracy_score
from sklearn.metrics import precision_recall_fscore_support
from sklearn.metrics import precision_score,accuracy_score,recall_score,f1_score,confusion_matrix

# Same measurement pass as the scripts above, but for a single model variant
# whose name is passed directly in argv[6].
# Usage: python measures.py <unused> <batch> <units> <layers> <norm> <model>
timesteps = 22  # kept for reference; not used below
ninput = 13     # kept for reference; not used below
p_split = 70
n_split = 10

batchsz = int(sys.argv[2])
nunits = int(sys.argv[3])
nlayer = int(sys.argv[4])
norm = int(sys.argv[5])
Tlstm = sys.argv[6]  # model/directory name, e.g. LSTM or B_LSTM

precision = np.zeros(3)
recall = np.zeros(3)
fscore = np.zeros(3)
# BUG FIX: the format string has six placeholders but the original passed
# `norm` twice and the undefined name `p_split_nunits` (NameError); the
# arguments are now (norm, Tlstm, nlayer, p_split, nunits, batchsz).
g_path = './dataset/N%d/%s_%dl_truthpred_%dp%du%db/' % (norm, Tlstm, nlayer, p_split, nunits, batchsz)
for i in range(n_split):
    LSTM_pred = np.load(g_path + 'totpred%d.npy' % i)
    test_y = np.load(g_path + 'gt%d.npy' % i)
    # sklearn convention: C[i][j] = samples with true class i predicted as j,
    # so rows are true classes and columns are predicted classes.
    C = confusion_matrix(test_y, LSTM_pred)
    TP = np.diag(C).astype('float32')  # true positives per class
    # BUG FIX: PRECISION = TP / retrieved (column sums) and
    # RECALL = TP / relevant (row sums); the original swapped the denominators.
    prec_temp = TP / C.sum(axis=0).astype('float32')
    recall_temp = TP / C.sum(axis=1).astype('float32')
    # FSCORE = 2 * (P*R / (P+R))
    fscore_temp = 2 * (prec_temp * recall_temp / (prec_temp + recall_temp))
    # Sum values per class.
    precision = precision + prec_temp
    recall = recall + recall_temp
    fscore = fscore + fscore_temp
# Mean P, R and FS values per class over the splits.
precision = np.divide(precision, n_split, dtype='float32')
recall = np.divide(recall, n_split, dtype='float32')
fscore = np.divide(fscore, n_split, dtype='float32')
# Scalar weighted measures; NOTE(review): computed on the last split only,
# matching the original behaviour.
acc_score = accuracy_score(test_y, LSTM_pred)
prec_score = precision_score(test_y, LSTM_pred, average='weighted')
rec_score = recall_score(test_y, LSTM_pred, average='weighted')
fsc_score = f1_score(test_y, LSTM_pred, average='weighted')
# BUG FIX: each save call had a stray ')' after the path expression
# (np.save(g_path+'precision.npy'), precision)), a SyntaxError.
np.save(g_path + 'precision.npy', precision)
np.save(g_path + 'recall.npy', recall)
np.save(g_path + 'fscore.npy', fscore)
np.save(g_path + 'accuracy_score.npy', acc_score)
np.save(g_path + 'precision_score.npy', prec_score)
np.save(g_path + 'recall_score.npy', rec_score)
np.save(g_path + 'fscore_score.npy', fsc_score)
import numpy as np
import pandas as pd
# NOTE(review): everything between the triple quotes below is one module-level
# string literal — i.e. disabled code, never executed. It appears to collect
# the .npy scores saved by the measurement scripts into CSV comparison tables.
# It also uses Python 2 `print` statements, so it would need porting before
# being re-enabled. Left byte-identical here; only these comments were added.
'''
#FSCORE 64-128-256 HIDDEN UNITS N1 1LIV
var_1_64_8N1 = './dataset/N1/LSTM1l_truthpred_70p64u8b/fscore_score.npy'
var_1_128_8N1 = './dataset/N1/LSTM1l_truthpred_70p128u8b/fscore_score.npy'
var_1_256_8N1 = './dataset/N1/LSTM1l_truthpred_70p256u8b/fscore_score.npy'
#FSCORE 64-128-256 HIDDEN UNITS N2 1LIV
var_1_64_8N2 = './dataset/N2/LSTM1l_truthpred_70p64u8b/fscore_score.npy'
var_1_128_8N2 = './dataset/N2/LSTM1l_truthpred_70p128u8b/fscore_score.npy'
var_1_256_8N2 = './dataset/N2/LSTM1l_truthpred_70p256u8b/fscore_score.npy'
var_N1 = './dataset/N1/RFC_truthpred_70p/fscore_score.npy'
#FSCORE 64-128-256 HIDDEN UNITS N1 3LIV
var_3_64_8N1 = './dataset/N1/LSTM3l_truthpred_70p64u8b/fscore_score.npy'
var_3_128_8N1 = './dataset/N1/LSTM3l_truthpred_70p128u8b/fscore_score.npy'
var_3_256_8N1 = './dataset/N1/LSTM3l_truthpred_70p256u8b/fscore_score.npy'
#FSCORE 64-128-256 HIDDEN UNITS N2 3LIV
var_3_64_8N2 = './dataset/N2/LSTM3l_truthpred_70p64u8b/fscore_score.npy'
var_3_128_8N2 = './dataset/N2/LSTM3l_truthpred_70p128u8b/fscore_score.npy'
var_3_256_8N2 = './dataset/N2/LSTM3l_truthpred_70p256u8b/fscore_score.npy'
#FSCORE RFC
var_N2 = './dataset/N2/RFC_truthpred_70p/fscore_score.npy'
#FSCORE 64-128 HIDDEN UNITS N1 1LIV
var_B_1_64_8N1 = './dataset/N1/B_LSTM1l_truthpred_70p64u8b/fscore_score.npy'
var_B_1_128_8N1 = './dataset/N1/B_LSTM1l_truthpred_70p128u8b/fscore_score.npy'
#FSCORE 64-128 HIDDEN UNITS N1 3LIV
var_B_3_64_8N1 = './dataset/N1/B_LSTM3l_truthpred_70p64u8b/fscore_score.npy'
var_B_3_128_8N1 = './dataset/N1/B_LSTM3l_truthpred_70p128u8b/fscore_score.npy'
#FSCOREs N1
fsN1=[]
#LSTM N1 1LIV
fsN1.append(np.load(var_1_64_8N1))
fsN1.append(np.load(var_1_128_8N1))
fsN1.append(np.load(var_1_256_8N1))
#LSTM N1 L3
fsN1.append(np.load(var_3_64_8N1))
fsN1.append(np.load(var_3_128_8N1))
fsN1.append(np.load(var_3_256_8N1))
#RFC
fsN1.append(np.load(var_N1))
#BI LSTM
fsN1.append(np.load(var_B_1_64_8N1))
fsN1.append(np.load(var_B_1_128_8N1))
fsN1.append(np.load(var_B_3_64_8N1))
fsN1.append(np.load(var_B_3_128_8N1))
#FSCORES N2
fsN2=[]
#LSTM N2 1LIV
fsN2.append(np.load(var_1_64_8N2))
fsN2.append(np.load(var_1_128_8N2))
fsN2.append(np.load(var_1_256_8N2))
#LSTM N2 L3
fsN2.append(np.load(var_3_64_8N2))
fsN2.append(np.load(var_3_128_8N2))
fsN2.append(np.load(var_3_256_8N2))
#RFC
fsN2.append(np.load(var_N2))
fsN2.append('-')
fsN2.append('-')
fsN2.append('-')
fsN2.append('-')
print "Fscores"
print "N1:",np.array(fsN1)
print "N2:",np.array(fsN2)
fsN1N2=np.vstack(( np.array(fsN1),np.array(fsN2) ))
fsLSTM_rfc = pd.DataFrame(np.array(fsN1N2),columns = ['1_64_8','1_128_8','1_256_8','3_64_8','3_128_8','3_256_8','RFC','bi_1_64_8','bi_1_128_8','bi_3_64_8','bi_3_128_8'])
fsLSTM_rfc.to_csv("fscoreLSTM-RFC_N1N2.csv")
#SINGLE LAYER ACCURACY 64-128-256 HIDDEN UNITS N1 1LIV
a_1_64_8N1 = './dataset/N1/LSTM1l_truthpred_70p64u8b/accuracy_score.npy'
a_1_128_8N1 = './dataset/N1/LSTM1l_truthpred_70p128u8b/accuracy_score.npy'
a_1_256_8N1 = './dataset/N1/LSTM1l_truthpred_70p256u8b/accuracy_score.npy'
#MUTLILAYER ACCURACY 64-128-256 HIDDEN UNITS N1 3LIV
a_3_64_8N1 = './dataset/N1/LSTM3l_truthpred_70p64u8b/accuracy_score.npy'
a_3_128_8N1 = './dataset/N1/LSTM3l_truthpred_70p128u8b/accuracy_score.npy'
a_3_256_8N1 = './dataset/N1/LSTM3l_truthpred_70p256u8b/accuracy_score.npy'
#SINGLE LAYER ACCURACY 64-128-256 HIDDEN UNITS N2 1LIV
a_1_64_8N2 = './dataset/N2/LSTM1l_truthpred_70p64u8b/accuracy_score.npy'
a_1_128_8N2 = './dataset/N2/LSTM1l_truthpred_70p128u8b/accuracy_score.npy'
a_1_256_8N2 = './dataset/N2/LSTM1l_truthpred_70p256u8b/accuracy_score.npy'
a_N1 = './dataset/N1/RFC_truthpred_70p/accuracy_score.npy'
#MUTLILAYER ACCURACY 64-128-256 HIDDEN UNITS N2 3LIV
a_3_64_8N2 = './dataset/N2/LSTM3l_truthpred_70p64u8b/accuracy_score.npy'
a_3_128_8N2 = './dataset/N2/LSTM3l_truthpred_70p128u8b/accuracy_score.npy'
a_3_256_8N2 = './dataset/N2/LSTM3l_truthpred_70p256u8b/accuracy_score.npy'
a_N2 = './dataset/N2/RFC_truthpred_70p/accuracy_score.npy'
#BIDIRECTIONAL ACCURACY 64-128 HIDDEN UNITS N1 1LIV
a_B_1_64_8N1 = './dataset/N1/B_LSTM1l_truthpred_70p64u8b/accuracy_score.npy'
a_B_1_128_8N1 = './dataset/N1/B_LSTM1l_truthpred_70p128u8b/accuracy_score.npy'
#BIDIRECTIONAL ACCURACY 64-128 HIDDEN UNITS N1 3LIV
a_B_3_64_8N1 = './dataset/N1/B_LSTM3l_truthpred_70p64u8b/accuracy_score.npy'
a_B_3_128_8N1 = './dataset/N1/B_LSTM3l_truthpred_70p128u8b/accuracy_score.npy'
#ATTENTION ACCURACY 64-128 HU N1 1L
a_Attention_1_64_8N1 = './dataset/N1/Attention1l_truthpred_70p64u8b/accuracy_score.npy'
a_Attention_1_128_8N1 = './dataset/N1/Attention1l_truthpred_70p128u8b/accuracy_score.npy'
#ATTENTION ACCURACY 64-128 HU N1 3L
a_Attention_3_64_8N1 = './dataset/N1/Attention1l_truthpred_70p64u8b/accuracy_score.npy'
a_Attention_3_128_8N1 = './dataset/N1/Attention1l_truthpred_70p128u8b/accuracy_score.npy'
#ACCURACY N1
aN1=[]
#LSTM 1LIV
aN1.append(np.load(a_1_64_8N1))
aN1.append(np.load(a_1_128_8N1))
aN1.append(np.load(a_1_256_8N1))
#LSTM 3LIV
aN1.append(np.load(a_3_64_8N1))
aN1.append(np.load(a_3_128_8N1))
aN1.append(np.load(a_3_256_8N1))
#RFC
aN1.append(np.load(a_N1))
#BI LSTM
aN1.append(np.load(a_B_1_64_8N1))
aN1.append(np.load(a_B_1_128_8N1))
aN1.append(np.load(a_B_3_64_8N1))
aN1.append(np.load(a_B_3_128_8N1))
#ACCURACY N2
aN2=[]
aN2.append(np.load(a_1_64_8N2))
aN2.append(np.load(a_1_128_8N2))
aN2.append(np.load(a_1_256_8N2))
aN2.append(np.load(a_3_64_8N2))
aN2.append(np.load(a_3_128_8N2))
aN2.append(np.load(a_3_256_8N2))
aN2.append(np.load(a_N2))
aN2.append('-')
aN2.append('-')
aN2.append('-')
aN2.append('-')
print "Accuracy"
print "N1:",np.array(fsN1)
print "N2:",np.array(fsN2)
aN1N2=np.vstack(( np.array(aN1),np.array(aN2) ))
aLSTM_rfc = pd.DataFrame(np.array(aN1N2),columns = ['1_64_8','1_128_8','1_256_8','3_64_8','3_128_8','3_256_8','RFC','bi_1_64_8','bi_1_128_8','bi_3_64_8','bi_3_128_8'])
aLSTM_rfc.to_csv("accuracyLSTM-RFC_N1N2.csv")
#FSCORE ACCURACY 64-128 HU N1 1L
var_Attention_1_64_8N1 = './dataset/N1/Attention1l_truthpred_70p64u8b/fscore_score.npy'
var_Attention_1_128_8N1 = './dataset/N1/Attention1l_truthpred_70p128u8b/fscore_score.npy'
#FSCORE ACCURACY 64-128 HU N1 3L
var_Attention_3_64_8N1 = './dataset/N1/Attention1l_truthpred_70p64u8b/fscore_score.npy'
var_Attention_3_128_8N1 = './dataset/N1/Attention1l_truthpred_70p128u8b/fscore_score.npy'
#FSCORE ACCURACY 64-128 HU N1 1L
var_concatH_1_64_8N1 = './dataset/N1/concatH1l_truthpred_70p64u8b/fscore_score.npy'
var_concatH_1_128_8N1 = './dataset/N1/concatH1l_truthpred_70p128u8b/fscore_score.npy'
#FSCORE ACCURACY 64-128 HU N1 3L
var_concatH_3_64_8N1 = './dataset/N1/concatH1l_truthpred_70p64u8b/fscore_score.npy'
var_concatH_3_128_8N1 = './dataset/N1/concatH1l_truthpred_70p128u8b/fscore_score.npy'
fsN1=[]
#LSTM 1LIV
fsN1.append(np.load(var_Attention_1_64_8N1))
fsN1.append(np.load(var_Attention_1_128_8N1))
fsN1.append(np.load(var_Attention_3_64_8N1))
fsN1.append(np.load(var_Attention_3_128_8N1))
fsN1.append(np.load(var_concatH_1_64_8N1))
fsN1.append(np.load(var_concatH_1_128_8N1))
fsN1.append(np.load(var_concatH_3_64_8N1))
fsN1.append(np.load(var_concatH_3_128_8N1))
fsAtt_concat = pd.DataFrame(np.array(fsN1),columns = ['A1_64_8','A1_128_8','A3_64_8','A3_128_8','conc_1_64_8','conc_1_128_8','conc_3_64_8','conc_3_128_8'])
fsAtt_concat.to_csv("fscoreAttentionConcat.csv")
'''
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment