diff --git a/.gitignore b/.gitignore
index 83658ec52733073084336058333361a36e6c8de6..7093930752e4234da1ca6825b8db3ac8a1174883 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,2 @@
 *.pyc
-*.log
+*.log*
diff --git a/Archive.py b/Archive.py
index 55f948ee8fc367d2b00b6456fd05aed972e6d26e..e87ecaf30b51d389906265baf9564832b58640b3 100644
--- a/Archive.py
+++ b/Archive.py
@@ -84,13 +84,11 @@ class Archive():
         """
             Test function to download a few archives
             
-            :param few_list_archive: [archive_download, out_archive]
-                        
-                        with :
+            :param few_list_archive: [archive_download, out_archive] with:
             
-                        * archive_dowload : Archives downloaded
+                * archive_download : Archives downloaded
                         
-                        * out_archive : Output archives path
+                * out_archive : Output archives path
                         
             :type few_list_archive: list dimension 2
         """
@@ -98,7 +96,7 @@ class Archive():
         _few_list_archive = np.array(few_list_archive)
         # Verify list informations
         if _few_list_archive.ndim < 2: 
-            print ('Few information in the list')
+            self.logger.info('Not enough information in the list')
         else:
             self.list_archive = few_list_archive
         
@@ -270,7 +268,8 @@ class Archive():
                         url_archive = link_archive.replace(self.resto, "rocket/#")
                         archive_download = url_archive.replace("/download", "") # Path to download
                         out_archive = self._folder + '/' + self._repertory + '/' + name_archive + '.tgz' # Name after download
-                        if len(self.list_archive) == 0 : self.list_archive.append([archive_download, out_archive, feature_id]) 
+                        # if len(self.list_archive) == 0 : self.list_archive.append([archive_download, out_archive, feature_id])
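+                        # Every matching archive is now appended, not only the first one.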
+                        self.list_archive.append([archive_download, out_archive, feature_id]) 
                     
                     # Verify if there is another page (next)
                     if data_Dict['properties']['links'][len(data_Dict['properties']['links'])-1]['title'] == 'suivant':
@@ -378,7 +377,7 @@ class Archive():
                     if self._captor == 'Landsat':
                         out_folder_unpack = folder_unpack + '/' + os.path.split(img)[1][:len(os.path.split(img)[1])-4]
                         if not os.path.exists(out_folder_unpack):
-                            print ('Unpack :'+os.path.split(img)[1])   
+                            self.logger.info('Unpack :'+os.path.split(img)[1])   
                             tfile = tarfile.open(img, 'r:gz')
                             tfile.extractall(str(folder_unpack))
                     
@@ -394,7 +393,7 @@ class Archive():
                         # Cloud images
                         # Cloud's path not exists in xmlfile, then replace 'SAT' by 'NUA'
                         ci = out_folder_unpack + '/' + xmlfile.xpath("/METADATA/FILES/MASK_SATURATION")[0].text.replace('_SAT', '_NUA')
-                    
+
                     elif self._captor == 'SENTINEL2':
                         
                         tzip = zipfile.ZipFile(img)
@@ -404,13 +403,13 @@ class Archive():
                         zip_folder = zip_img[0].split('/')[0]
                         out_folder_unpack = folder_unpack + '/' + zip_folder
                         
-                        print ('Unpack :' + os.path.split(zip_folder)[1])
+                        self.logger.info('Unpack :' + os.path.split(zip_folder)[1])
                         extraction_img = []
                         for e in extent_img:
                             z_out = [f for f in zip_img if e in f][0]
                             extraction_img.append(folder_unpack + '/' + str(z_out))
                             if not os.path.exists(folder_unpack + '/' + str(z_out)):
-                                print('Extract :%s' %(z_out))
+                                self.logger.info('Extract :%s' %(z_out))
                                 tzip.extract(str(z_out), str(folder_unpack))
                                                  
                         # On xmlfile, extract dates, path of images, cloud's images
@@ -428,13 +427,14 @@ class Archive():
                             stack_img.vrt_translate_gdal('vrt', input_stack, hi[:-4] + '.VRT')
                             stack_img.vrt_translate_gdal('translate', hi[:-4] + '.VRT', hi)
                         # Cloud images
+                        
                         ci = out_folder_unpack + '/' + xmlfile.xpath("/Muscate_Metadata_Document/Product_Organisation/Muscate_Product/Mask_List/Mask/Mask_File_List/MASK_FILE")[2].text
-                       
+                        
                     self.list_img.append([di[0], di[1], di[2], str(hi), str(ci)])
                     
                     # Create a list with dates without duplicates
                     if not di in self.single_date:
                         self.single_date.append(di)
     
-        print ("End of unpack archives")
+        self.logger.info("End of unpack archives.")
     
diff --git a/Constantes.py b/Constantes.py
index 9ec418553205a57943cb28340dcc2a75ff634c19..c313d6441365972cc06125c421ac3136b20ac2f9 100644
--- a/Constantes.py
+++ b/Constantes.py
@@ -1,6 +1,11 @@
 
+import logging
+
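+# Default log level, read by Outils.Log when no explicit level is supplied.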
+NIVEAU_DEFAUT = logging.DEBUG
+
 SIMPLE_MODE = 0
 EXPERT_MODE = 1
 
 MULTIPROCESSING_ENABLE = True
 MULTIPROCESSING_DISABLE = False
+
diff --git a/Outils.py b/Outils.py
index 7500da87ef663f921b7f527cd1d6bbc260a7906a..7b8f835c9146dedf8183017c8fcdd2b30327fc99 100644
--- a/Outils.py
+++ b/Outils.py
@@ -6,6 +6,14 @@ import logging
 from logging.handlers import RotatingFileHandler
 import signal
 from contextlib import contextmanager
+import inspect
+import traceback
+
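+# Constantes is optional here: if this import fails, Log falls back to logging.DEBUG below.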
+try:
+	import Constantes
+except ImportError:
+	pass
+
 
 class Fusion(argparse.Action):
 	def __init__(self, option_strings, dest, nargs=None, **kwargs):
@@ -15,21 +23,35 @@ class Fusion(argparse.Action):
 
 class Log(object):
 
-	def __init__(self, dossier, nomFichier, niveau=logging.DEBUG):
+	def __init__(self, dossier, nomFichier, niveau=None):
+
 		super(Log, self).__init__()
 
+		stack = inspect.stack()
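+		# Look up the calling class name in the caller's frame so log messages can
+		# be prefixed with it; falls back to this Log instance if "self" is absent.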
+		nom_classe = stack[1][0].f_locals.get("self", self).__class__.__name__
+
 		self.__logger__ = logging.getLogger(nomFichier)
+
+		if niveau is None:
+			try:
+				niveau = Constantes.NIVEAU_DEFAUT
+			except (NameError, AttributeError):
+				niveau = logging.DEBUG
+
 		self.__logger__.setLevel(niveau)
 
-		format = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
+		format_fichier = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
 		fichierLog = RotatingFileHandler("{0}/{1}.log".format(dossier, nomFichier), 'a', 1000000, 1)
 
 		fichierLog.setLevel(niveau)
-		fichierLog.setFormatter(format)
+		fichierLog.setFormatter(format_fichier)
 		self.__logger__.addHandler(fichierLog)
 
+		format_console = logging.Formatter('%(asctime)s :: {0} :: %(levelname)s :: %(message)s'.format(nom_classe), datefmt='%d-%m-%Y %H:%M:%S')
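+		# The console formatter prefixes messages with the calling class name; the file handler keeps the shorter format.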
+
 		console = logging.StreamHandler()
 		console.setLevel(niveau)
+		console.setFormatter(format_console)
 		self.__logger__.addHandler(console)
 
 	def info(self, message):
diff --git a/PHYMOBAT.py b/PHYMOBAT.py
index 8278ebf5c743fb8c5b910b352353f723e8ba7a89..dc9fee000df8a7e9ecee18ca1f78209e050f4178 100644
--- a/PHYMOBAT.py
+++ b/PHYMOBAT.py
@@ -81,7 +81,7 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 
 		# Chargement et lancement automatique pour les tests à supprimer
 		self.open_backup("/home/commandre/Documents/data_test_2/2018.xml")
-		# self.ok_button()
+		self.ok_button()
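+		# Automatic loading and launch kept for tests (see the note above); to be removed.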
 		
 	def initUI(self):
 		
@@ -264,7 +264,7 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 		self.path_ortho = "%s" % self.ui.lineEdit_VHRS.text()
 		
 		# MNT image path by line edit
-		self.path_mnt = "%s" % self.ui.lineEdit_MNT.text()		
+		self.path_mnt = self.ui.lineEdit_MNT.text()		
 		
 		# Output shapefile field name by line edit and field type by combo box
 		try:
@@ -453,10 +453,12 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 		"""
 		
 		nb_sample = len(self.sample_name)# Compute number of samples added. Condition : max three. 
+
 		# Study area shapefile path by line edit if no processing other
 		# Because the function "Vector" need study area
-		self.path_area = "%s" % self.ui.lineEdit_area_path.text()
-		if self.path_area == '':
+		self.path_area = self.ui.lineEdit_area_path.text()
+
+		if not self.path_area:
 			self.forget_study_area()
 		
 		if self.current_mode == Constantes.EXPERT_MODE:
@@ -521,40 +523,40 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 				self.ui.lineEdit_select_sample_nb_poly.clear()
 				
 		elif self.current_mode == Constantes.SIMPLE_MODE:
-			
 			# Simple mode
+
 			# Append a sample name by line Edit.
 			# For the sample of the RPG file. It launch the Rpg class. And append a other sample from Rpg class 
-			self.sample_name.append(self.i_rpg("%s" % self.ui.lineEdit_sample_path.text()))
+			self.sample_name.append(self.i_rpg(self.ui.lineEdit_sample_path.text()))
 			
 			# Append two sample field names by line edit. It must be the same.
-			self.fieldname_args.append("%s" % self.ui.lineEdit_select_sample_fieldname_1.text())
-			self.fieldname_args.append("%s" % self.ui.lineEdit_select_sample_fieldname_2.text())
+			self.fieldname_args.append(self.ui.lineEdit_select_sample_fieldname_1.text())
+			self.fieldname_args.append(self.ui.lineEdit_select_sample_fieldname_2.text())
+
 			# Append sample class names by line edit. One or more for every sample
-			self.class_args.append("%s" % self.ui.lineEdit_select_sample_class_1.text())
-			self.class_args.append("%s" % self.ui.lineEdit_select_sample_class_2.text())
+			self.class_args.append(self.ui.lineEdit_select_sample_class_1.text())
+			self.class_args.append(self.ui.lineEdit_select_sample_class_2.text())
+
 			# Append number of polygons for every samples by line edit.
-			self.list_nb_sample.append("%s" % self.ui.lineEdit_select_sample_nb_poly.text())
+			self.list_nb_sample.append(self.ui.lineEdit_select_sample_nb_poly.text())
 			
 			# Same process that the RPG process except the start of the RPG class.
 			# To Grass/wooden
-			self.sample_name.append("%s" % self.ui.lineEdit_sample_path_2.text())
-			self.fieldname_args.append("%s" % self.ui.lineEdit_select_sample_fieldname_3.text())
-			self.fieldname_args.append("%s" % self.ui.lineEdit_select_sample_fieldname_4.text())
-			self.class_args.append("%s" % self.ui.lineEdit_select_sample_class_3.text())
-			self.class_args.append("%s" % self.ui.lineEdit_select_sample_class_4.text())
-			self.list_nb_sample.append("%s" % self.ui.lineEdit_select_sample_nb_poly_2.text())
+			self.sample_name.append(self.ui.lineEdit_sample_path_2.text())
+			self.fieldname_args.append(self.ui.lineEdit_select_sample_fieldname_3.text())
+			self.fieldname_args.append(self.ui.lineEdit_select_sample_fieldname_4.text())
+			self.class_args.append(self.ui.lineEdit_select_sample_class_3.text())
+			self.class_args.append(self.ui.lineEdit_select_sample_class_4.text())
+			self.list_nb_sample.append(self.ui.lineEdit_select_sample_nb_poly_2.text())
 			
 			# To wooden
-			self.sample_name.append("%s" % self.ui.lineEdit_sample_path_3.text())
-			self.fieldname_args.append("%s" % self.ui.lineEdit_select_sample_fieldname_5.text())
-			self.fieldname_args.append("%s" % self.ui.lineEdit_select_sample_fieldname_6.text())
-			self.class_args.append("%s" % self.ui.lineEdit_select_sample_class_5.text())
-			self.class_args.append("%s" % self.ui.lineEdit_select_sample_class_6.text())
-			self.list_nb_sample.append("%s" % self.ui.lineEdit_select_sample_nb_poly_3.text())
-			
-			nb_sample = 3
-	
+			self.sample_name.append(self.ui.lineEdit_sample_path_3.text())
+			self.fieldname_args.append(self.ui.lineEdit_select_sample_fieldname_5.text())
+			self.fieldname_args.append(self.ui.lineEdit_select_sample_fieldname_6.text())
+			self.class_args.append(self.ui.lineEdit_select_sample_class_5.text())
+			self.class_args.append(self.ui.lineEdit_select_sample_class_6.text())
+			self.list_nb_sample.append(self.ui.lineEdit_select_sample_nb_poly_3.text())
+
 	def clear_sample(self):
 		"""
 			Function to clear sample record. Clear in the interface and in the memory list.
@@ -848,7 +850,6 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 			if ok == 1:	   
 				# Compute a output slope raster 
 				if self.ui.checkBox_MNT.isChecked():
-				
 					self.i_slope()	   
 				
 				# Downloading and processing on theia platform
@@ -929,20 +930,20 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 		if self.current_mode == Constantes.SIMPLE_MODE:
 			
 			# Compute a output slope raster 
-
-			if self.path_mnt != "":
+			if self.path_mnt:
 				self.i_slope()
 		
 			# Save the sample features
 			self.add_sample()
 			
 			# Look at if the the images has been already downloaded, download them otherwise
-			# Commenté pour ne traiter qu'une image
 			self.i_download()
 			
 			# function to launch the image processing	
 			self.i_images_processing()
 
+			raise Exception("Phymobat")  # temporary debug stop, to be removed
+
 			# To launch texture processing only
 			self.i_vhrs()
 
@@ -960,7 +961,8 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 		self.out_fieldname_carto = ['ID', 'AREA']
 		self.out_fieldtype_carto = [ogr.OFTString, ogr.OFTReal]
 		# Images after processing images
-		self.out_ndvistats_folder_tab = defaultdict(list)
+		self.out_ndvistats_folder_tab = defaultdict(list)
 		
 		Processing.__init__(self)# Initialize variable without to close and launch again the application
 		
diff --git a/Precision_moba.py b/Precision_moba.py
index ff3e1ddb7b6eeddac0bad4469f9dc1d4e378ebc0..be00495c0882647df15f268d31238161a6a3c0f0 100644
--- a/Precision_moba.py
+++ b/Precision_moba.py
@@ -152,15 +152,10 @@ class Precision_moba():
             kappa = self.cohen_kappa_score(fb_stats_val, fb_stats_in)
             f.write("\n")
             f.write("Kappa : " + str(kappa) + "\n")
-            print('')
-            print('')
-            print('Accuracy : %f, Kappa : %f, Recall : %f, Precision : %f, F_score : %f' % (accur, kappa, recall, pr, f_scor))
-            print('')
-            print (all_pr)
-            print('')
-            print('Confusion matrix :')
-            print (cm)
-        
+            
+            self.logger.info(
+                '\n\nAccuracy : {0}, Kappa : {1}, Recall : {2}, Precision : {3}, '
+                'F_score : {4}\n{5}\nConfusion matrix :\n{6}'.format(
+                    accur, kappa, recall, pr, f_scor, all_pr, cm))
         
     def cohen_kappa_score(self, y1, y2, labels=None):
         """
diff --git a/Processing.py b/Processing.py
index af90bb46500dc869cabdbab24830fd405f64934d..f3e885eee2d223ced555571bc5cebf95e3137c59 100644
--- a/Processing.py
+++ b/Processing.py
@@ -22,6 +22,7 @@ import numpy as np
 import subprocess
 from sklearn.ensemble import RandomForestClassifier
 from sklearn import tree
+
 try :
 	import ogr, gdal
 except :
@@ -49,7 +50,7 @@ from collections import defaultdict
 from multiprocessing import Process
 from multiprocessing.managers import BaseManager, DictProxy
 
-class Processing():
+class Processing(object):
 	
 	"""
 		Main processing. This class launch the others system classes. It take into account
@@ -150,16 +151,18 @@ class Processing():
 						 'Ligneux mixtes', 'Ligneux denses',\
 						 'Agriculture', 'Eboulis', \
 						 'Forte phytomasse', 'Moyenne phytomasse', 'Faible phytomasse']
+		
 		# Sample field names 2 by 2
 		self.fieldname_args = []
-#								'CODE_GROUP', 'CODE_GROUP',\
-#						   'echant', 'echant',\
-#						   'echant', 'echant']
+		#	'CODE_GROUP', 'CODE_GROUP',\
+		#	'echant', 'echant',\
+		#	'echant', 'echant']
+
 		# Sample class names 2 by 2
 		self.class_args = []
-#							'1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 14, 15, 20, 21, 22, 23, 24, 26, 28', '18',\
-#					   'H','LF, LO',\
-#					   'LF', 'LO']
+		#	'1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 14, 15, 20, 21, 22, 23, 24, 26, 28', '18',\
+		#	'H','LF, LO',\
+		#	'LF', 'LO']
 		
 		# Decision tree combination
 		self.tree_direction = [[0],\
@@ -189,8 +192,9 @@ class Processing():
 		self.mp = Constantes.MULTIPROCESSING_DISABLE
 		
 		# Function followed
-		self.check_download = ''
+		self.check_download = None
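+		# None until the download step stores the Archive instance used by i_img_sat().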
 		self.decis = {}
+
 		# Images after processing images
 		self.out_ndvistats_folder_tab = defaultdict(list)
 		
@@ -233,45 +237,48 @@ class Processing():
 	def i_img_sat(self):
 		
 		"""
-		Interface function to processing satellite images:
-		
-			1. Clip archive images and modify Archive class to integrate clip image path.
-			With :func:`Toolbox.clip_raster` in ``Toolbox`` module.
-		
-			2. Search cloud's percentage :func:`RasterSat_by_date.RasterSat_by_date.pourc_cloud`, select 
-			image and compute ndvi index :func:`RasterSat_by_date.RasterSat_by_date.calcul_ndvi`. If cloud's percentage is 
-			greater than 40%, then not select and compute ndvi index.
-		
-			3. Compute temporal stats on ndvi index [min, max, std, min-max]. With :func:`Toolbox.calc_serie_stats` 
-			in ``Toolbox`` module.
+			Interface function to process satellite images:
 			
-			4. Create stats ndvi raster and stats cloud raster.
+				1. Clip archive images and modify Archive class to integrate clip image path.
+				With :func:`Toolbox.clip_raster` in ``Toolbox`` module.
 			
-			>>> import RasterSat_by_date
-			>>> stats_test = RasterSat_by_date(class_archive, big_folder, one_date)
-			>>> stats_test.complete_raster(stats_test.create_raster(in_raster, stats_data, in_ds), stats_data)
-		"""
-
-		# Clip archive images and modify Archive class to integrate clip image path
-		for clip in self.check_download.list_img:
-			clip_index = self.check_download.list_img.index(clip)
+				2. Compute the cloud percentage :func:`RasterSat_by_date.RasterSat_by_date.pourc_cloud`, select the
+				image and compute the ndvi index :func:`RasterSat_by_date.RasterSat_by_date.calcul_ndvi`. If the cloud
+				percentage is greater than 40%, the image is not selected and no ndvi index is computed.
 			
+				3. Compute temporal stats on ndvi index [min, max, std, min-max]. With :func:`Toolbox.calc_serie_stats` 
+				in ``Toolbox`` module.
+				
+				4. Create stats ndvi raster and stats cloud raster.
+				
+				>>> import RasterSat_by_date
+				>>> stats_test = RasterSat_by_date(class_archive, big_folder, one_date)
+				>>> stats_test.complete_raster(stats_test.create_raster(in_raster, stats_data, in_ds), stats_data)
+		"""
+		
+		# Check the projection of each image and its cloud mask, then clip them to the study area
+		for clip_index, clip in enumerate(self.check_download.list_img):
 			current_list = Toolbox()
-			current_list.imag = clip[3]
+			current_list.image = clip[3]
 			current_list.vect = self.path_area
+
 			current_list.check_proj() # Check if projection is RFG93 
 			self.check_download.list_img[clip_index][3] = current_list.clip_raster() # Multispectral images
 			
-			current_list.imag = clip[4]
+			current_list.image = clip[4]
 			current_list.check_proj() # Check if projection is RFG93 
 			self.check_download.list_img[clip_index][4] = current_list.clip_raster() # Cloud images
 		   
 		# Images pre-processing
 		spectral_out = []
+		check_L8 = RasterSat_by_date(self.check_download, self.folder_processing)
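+		# A single RasterSat_by_date instance is now reused for every date; the date is passed to mosaic_by_date().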
+
 		for date in self.check_download.single_date:
 			   
-			check_L8 = RasterSat_by_date(self.check_download, self.folder_processing, date)
-			check_L8.mosaic_by_date()
+			# check_L8 = RasterSat_by_date(self.check_download, self.folder_processing, date)
+			check_L8.mosaic_by_date(date)
+
+			raise Exception("STOP")  # temporary debug stop, to be removed
 			 
 			# Search cloud's percentage, select image and compute ndvi index if > cl
 			cl = check_L8.pourc_cloud(check_L8._one_date[3], check_L8._one_date[4])
@@ -313,7 +320,7 @@ class Processing():
 		study_slope.extract_slope()# Call this function to compute slope raster
 		self.path_mnt = study_slope.out_mnt
 	
-	def i_vhrs(self):#, vs):
+	def i_vhrs(self):
 		"""
 			Interface function to processing VHRS images. It create two OTB texture images :func:`Vhrs.Vhrs` : SFS Texture and Haralick Texture
 		"""
@@ -330,12 +337,11 @@ class Processing():
 		self.out_ndvistats_folder_tab['haralick'] = texture_irc.out_haralick
 		
 	def i_images_processing(self, vs=1): 
-		
 		"""
-			Interface function to launch processing VHRS images :func:`i_vhrs` and satellite images :func:`i_img_sat` in multi-processing.
+			Interface function to launch processing VHRS images :func:`i_vhrs`
+			and satellite images :func:`i_img_sat` in multi-processing.
 			
 			:param vs: Boolean variable to launch texture processing because of interface checkbox -> 0 or 1.
-			
 				- 0 means, not texture processing
 				- 1 means, launch texture processing
 			:type vs: int
@@ -352,6 +358,8 @@ class Processing():
 		p_img_sat = Process(target=self.i_img_sat)
 		p_img_sat.start()
 
+		raise Exception("Process")  # temporary debug stop, to be removed
+
 		if self.mp == Constantes.MULTIPROCESSING_DISABLE:
 			p_img_sat.join()
 		
@@ -395,23 +403,26 @@ class Processing():
 		
 	def i_rpg(self, path_rpg): 
 		"""
-		Interface function to extract mono rpg crops.
-		
-		:param path_rpg: Input RPG shapefile.
-		:type path_rpg: str
-		
-		:returns: str -- variable **Rpg.vector_used**, output no duplicated crops shapefile (path).
+			Interface function to extract mono rpg crops.
+			
+			:param path_rpg: Input RPG shapefile.
+			:type path_rpg: str
+			
+			:returns: str -- variable **Rpg.vector_used**, output no duplicated crops shapefile (path).
 		"""
 			   
-		# Extract mono rpg crops
+		# Extract rpg crops
 		mono_sample = Rpg(path_rpg, self.path_area)
+
 		# If exists, do not create a mono rpg file
-		if os.path.basename(str(path_rpg))[:5]!='MONO_':
+		if not os.path.exists("{0}/MONO_{1}".format(mono_sample.vector_folder, mono_sample.vector_name)):
 			mono_sample.mono_rpg()
 			mono_sample.create_new_rpg_files()
 		else:
-			print('MONO RPG file exists already !!!')
-		print('End of RPG processing')
+			self.logger.info('MONO RPG file already exists.')
+			mono_sample.vector_used = "{0}/MONO_{1}".format(mono_sample.vector_folder, mono_sample.vector_name)
+
+		self.logger.info('End of RPG processing')
 		
 		return mono_sample.vector_used
 		 
@@ -468,14 +479,14 @@ class Processing():
 				i_s = i_s + 1
 		# Method to stop the processus if there is not found a valid threshold
 		if i_s != 20:
-			print ('Problem in the sample processing !!!')
+			self.logger.error('Problem in the sample processing !!!')
 			sys.exit(1)
 	
 	def i_sample_rf(self):
 		"""
-		This function build a random forest trees like model to create a final classification.
-		All of This using the method described in the :func:`i_validate` function and because
-		of sklearn module.
+			This function builds a random forest model to create the final classification,
+			using the method described in the :func:`i_validate` function and the
+			sklearn module.
 		"""
 		
 		X_rf = []
@@ -720,7 +731,7 @@ class Processing():
 			# out_carto.zonal_stats((raster_path[ind_th+1], list_band_outraster[ind_th+1]))
 			multi_process_var.append([self.raster_path[ind_th+2], self.list_band_outraster[ind_th+2]])
 		except:
-			print('Not MNT on the 3rd step')
+			self.logger.info('No MNT on the 3rd step')
 			multi_process_var.append([self.raster_path[ind_th+1], self.list_band_outraster[ind_th+1]])
 			multi_process_var.append([self.raster_path[ind_th+1], self.list_band_outraster[ind_th+1]])
 
diff --git a/RasterSat_by_date.py b/RasterSat_by_date.py
index 0f44f9670154e6c30dee28bc602eef592c70c06b..ec2292bc9e38ae6bbd5795d7dbc79ebba973c710 100644
--- a/RasterSat_by_date.py
+++ b/RasterSat_by_date.py
@@ -24,57 +24,61 @@ try :
 except :
 	from osgeo import gdal
 
+import Outils
 import numpy as np
 
-class RasterSat_by_date():
+class RasterSat_by_date(object):
 	"""
-	Satellite image  processing's class. This class include several processes to group images by date, mosaic images by date,
-	extract images information, compute ndvi, compute cloud pourcent and create new rasters.
-	
-	:param class_archive: Archive class name with every information on downloaded images
-	:type class_archive: class
-	:param big_folder: Image processing folder
-	:type big_folder: str
-	:param one_date: [year, month, day] ...
-			This variable is modified in the function :func:`mosaic_by_date()`. 
-			To append mosaic image path, mosaic cloud image path, cloud pixel value table, mosaic ndvi image path and ndvi pixel value table.
-	:type one_date: list of str
-	:param choice_nb_b: A option to choice output image number of band :func:`layer_rasterization` in Vector's class. If this option is 0, it take input band. By default 0.
-	:type choice_nb_b: int
-	
-	"""  
-	def __init__(self, class_archive, big_folder, one_date):
-		"""Create a new 'Landsat_by_date' instance
+		Satellite image processing class.
+		This class includes several processes to group images by date, mosaic images by date,
+		extract image information, compute ndvi, compute cloud percentage and create new rasters.
+		
+		@param class_archive: Archive class instance with every information on downloaded images
+		@type class_archive: class
+
+		@param big_folder: Image processing folder
+		@type big_folder: str
+
+		@param choice_nb_b: An option to choose the number of bands of the output image
+					(:func:`layer_rasterization` in the Vector class).
+					If this option is 0, the input band count is used. 0 by default.
+		@type choice_nb_b: int
+
+		The date [year, month, day] is now passed directly to :func:`mosaic_by_date`,
+		and is no longer a constructor parameter.
+	"""  
+	def __init__(self, class_archive, big_folder):
 		"""
-		
+			Create a new 'RasterSat_by_date' instance
+		"""
+
+		self.logger = Outils.Log("Log", "RasterSat_by_date")
 		self._class_archive = class_archive
 		self._big_folder = big_folder
-		self._one_date = one_date		
-		# Verify list of list of str
-		if one_date == []:
-			print ("Enter dates to mosaic images like [[str(year), str(month), str(day)], [str(year), str(month), str(day)], ...]")
-			sys.exit(1)
-		else:
-			self._one_date = one_date
 		
 		self.out_ds = None
 		self.choice_nb_b = 0
 		   
 	def group_by_date(self, d_uni):
 		"""
-		Function to extract images on a single date
-		
-		:param d_uni: [year, month, day]
-		:type d_uni: list of str
-		
-		:returns: list of str -- variable **group** = [year, month, day, multispectral image path, cloud image path]
+			Function to extract images on a single date
+			
+			@param d_uni: [year, month, day]
+			@type d_uni: list of str
+			
+			@returns: list of str -- variable **group** = [year, month, day, multispectral image path, cloud image path]
 		"""
 		 
 		# Group of images with the same date
 		group = []
-		 
+		
 		# Take images with the same date
+
+		# self.logger.debug(self._class_archive.list_img)
+
 		for d_dup in self._class_archive.list_img:
 			if d_dup[:3] == d_uni:
 				group.append(d_dup)
@@ -101,10 +105,10 @@ class RasterSat_by_date():
 		if vrt_translate == 'vrt':
 			# Verify input data
 			if type(src_data) is not np.ndarray and type(src_data) is not list:
-				print ('VRT file ! The data source should be composed of several data. A list minimal of 2 dimensions')
+				self.logger.error('VRT file ! The data source should be composed of several data. A list minimal of 2 dimensions')
 				sys.exit(1)
 				
-			print ('Build VRT file')
+			self.logger.info('Build VRT file')
 			if not os.path.exists(dst_data):
 				process_tocall = ['gdalbuildvrt', '-srcnodata', '-10000', dst_data]
 				
@@ -117,63 +121,73 @@ class RasterSat_by_date():
 			try :
 				src_data = str(src_data)
 			except:# if type(src_data) is not str:
-				print ('Geotiff file ! The data source should be composed of path file. A character string !')
+				self.logger.error('Geotiff file ! The data source should be composed of path file. A character string !')
 				sys.exit(1)
 				
-			print ('Build Geotiff file')
+			self.logger.info('Build Geotiff file')
 			if not os.path.exists(dst_data):
 				process_tocall = ['gdal_translate', '-a_nodata', '-10000', src_data, dst_data]
 			
 		# Launch vrt process
 		subprocess.call(process_tocall)
 		
-	def mosaic_by_date(self):
+	def mosaic_by_date(self, date):
 		"""
-		Function to merge images of the same date in a image group :func:`group_by_date`.
+			Function to merge images of the same date into an image group :func:`group_by_date`.
 		"""
 		
+		traitement_folder = "{0}/{1}".format(self._class_archive._folder, self._big_folder)
+	
 		# Create the processing images folder if not exists
-		if not os.path.exists(self._class_archive._folder + '/' + self._big_folder):
-			os.mkdir(self._class_archive._folder + '/' + self._big_folder)
-			
+		if not os.path.exists(traitement_folder):
+			os.mkdir(traitement_folder)
+
 		# Matrix multi images for a single date 
-		group = self.group_by_date(self._one_date) # Every images [year, month, day, multispectral image, cloud image]
+		group = self.group_by_date(date) # Every images [year, month, day, multispectral image, cloud image]
 		group_ = np.transpose(np.array(group)) # Transpose matrix to extract path of images
-		
+
 		# Create a folder with images year if it doesn't exist
 		index_repertory_img = self._class_archive._captor
-		if not os.path.exists(self._class_archive._folder + '/' + self._big_folder + '/' + index_repertory_img):
-			os.mkdir(self._class_archive._folder + '/' + self._big_folder + '/' + index_repertory_img)
+		sensor_folder = "{0}/{1}".format(traitement_folder, self._class_archive._captor)
 		
-		index_repertory_img = index_repertory_img + '/'
+		if not os.path.exists(sensor_folder):
+			os.mkdir(sensor_folder)
+
 		# Create a folder with images date if it doesn't exist
-		for d_ in self._one_date:
-			index_repertory_img = index_repertory_img + d_
-		
-		if not os.path.exists(self._class_archive._folder + '/' + self._big_folder + '/' + index_repertory_img):
-			os.mkdir(self._class_archive._folder + '/' + self._big_folder + '/' + index_repertory_img)
+		date_folder = "{0}/{1}".format(sensor_folder, "".join(date))
+
+		if not os.path.exists(date_folder):
+			os.mkdir(date_folder)
+
+		############################# Build VRT file with data images required #############################
 		
-		# Build VRT file with data images required
-		vrt_out = self._class_archive._folder + '/' + self._big_folder + '/' + index_repertory_img + '/' \
-					+ self._class_archive._captor + index_repertory_img.split("/")[1] + '.VRT' # Multispectral VRT outfile
+		# Multispectral VRT outfile
+		vrt_out = "{0}/{1}{2}.VRT".format(date_folder, self._class_archive._captor, os.path.basename(date_folder))
+
 		if not os.path.exists(vrt_out):
 			self.vrt_translate_gdal('vrt', group_[3], vrt_out)
 		
-		vrtcloud_out = self._class_archive._folder + '/' + self._big_folder + '/' + index_repertory_img + '/' \
-					+ self._class_archive._captor + index_repertory_img.split("/")[1] + '_' + group_[4][0][-7:-4] + '.VRT' # Cloud TIF outfile
+		# Cloud VRT outfile
+		vrtcloud_out = "{0}/{1}{2}_NUA.VRT".format(date_folder, self._class_archive._captor, os.path.basename(date_folder))
+
 		if not os.path.exists(vrtcloud_out):
 			self.vrt_translate_gdal('vrt', group_[4], vrtcloud_out)
 		
-		# Build Geotiff file with data images required
-		gtif_out = vrt_out[:-4] + '.TIF' # Multispectral VRT outfile
+		########################### Build Geotiff file with data images required ###########################
+		
+		# Multispectral TIF outfile
+		gtif_out = "{0}.TIF".format(vrt_out[:-4])
 		if not os.path.exists(gtif_out):
 			self.vrt_translate_gdal('translate', vrt_out, gtif_out)
-		self._one_date.append(gtif_out)
 		
-		gtifcloud_out = vrtcloud_out[:-4] + '.TIF' # Cloud TIF outfile
+		# Cloud TIF outfile
+		gtifcloud_out = "{0}.TIF".format(vrtcloud_out[:-4])
 		if not os.path.exists(gtifcloud_out):
 			self.vrt_translate_gdal('translate', vrtcloud_out, gtifcloud_out)
-		self._one_date.append(gtifcloud_out)
+
+		raise Exception("STOP")  # temporary debug stop; note it prevents images_gtif below from being set
+		self.images_gtif = [gtif_out, gtifcloud_out]
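+		# Mosaic results are now exposed through images_gtif instead of being appended to _one_date.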
+		# self._one_date.append(gtifcloud_out)
 	
 	def raster_data(self, img):
 		"""
@@ -192,12 +206,12 @@ class RasterSat_by_date():
 		gdal.AllRegister()
 		
 		# Loading input raster
-		print ('Loading input raster :', os.path.split(str(img))[1][:-4])
+		self.logger.info('Loading input raster : {0}'.format(os.path.split(str(img))[1][:-4]))
 		in_ds = gdal.Open(str(img), gdal.GA_ReadOnly)
 		
 		# if it doesn't exist
 		if in_ds is None:
-			print('could not open ')
+			self.logger.error('Could not open {0}'.format(img))
 			sys.exit(1)
 		
 		# Information on the input raster 
@@ -254,7 +268,7 @@ class RasterSat_by_date():
 		mask_spec = np.in1d(data_spec[0], [-10000, math.isnan], invert=True) # ex : array([ True,  True,  True,  True, False,  True,  True,  True,  True], dtype=bool) -> False where there is -10000 ou NaN
 		
 		# Print area account of the pixel size 'info_spec.GetGeoTransform()'
-		print ('Area = ' + str(float((np.sum(mask_spec)  * info_spec.GetGeoTransform()[1] * abs(info_spec.GetGeoTransform()[-1]) )/10000)) + 'ha' )
+		self.logger.info('Area = ' + str(float((np.sum(mask_spec)  * info_spec.GetGeoTransform()[1] * abs(info_spec.GetGeoTransform()[-1]) )/10000)) + 'ha' )
 		
 		# Cloud mask
 		mask_cloud = np.in1d(data_cloud, 0) # This is the same opposite False where there is 0
@@ -264,10 +278,10 @@ class RasterSat_by_date():
 		# Computer cloud's percentage with dist (sum of cloud) by sum of the image's extent
 		try :
 			nb0 = float(dist)/(np.sum(mask_spec))
-			print('For ' + os.path.split(str(img_spec))[1][:-4] + ', cloudy cover ' + str(100 - round(nb0*100, 2)) + "%")
+			self.logger.info('For ' + os.path.split(str(img_spec))[1][:-4] + ', cloudy cover ' + str(100 - round(nb0*100, 2)) + "%")
 		except ZeroDivisionError:
 			nb0 = 0
-			print("The raster isn\'t in the area !")
+			self.logger.info("The raster isn\'t in the area !")
 		
 		return nb0
 	
@@ -336,7 +350,7 @@ class RasterSat_by_date():
 			# Create outfile
 			self.out_ds = driver.Create(str(out_raster), in_ds.RasterXSize, in_ds.RasterYSize, nbband, gdal.GDT_Float32)
 			if self.out_ds is None:
-				print ('Could not create ' + os.path.split(str(out_raster))[1])
+				self.logger.error('Could not create ' + os.path.split(str(out_raster))[1])
 				sys.exit(1)
 				
 			# Get extent coordinates and raster resolution
@@ -386,7 +400,7 @@ class RasterSat_by_date():
 				#Incrementation
 				p = p + 1
 			
-				print ("Copy on the band ", p)
+				self.logger.info("Copy on the band : {0}".format(p))
 	  
 				# Loading spectral band of outfile
 				out_band = self.out_ds.GetRasterBand(p) 
diff --git a/Rpg.py b/Rpg.py
index 18c722c92d8e1d523826012528c0a145067339f8..42114445901936f6dadfe893629dee7646e49cc4 100644
--- a/Rpg.py
+++ b/Rpg.py
@@ -18,35 +18,49 @@
 # along with PHYMOBAT 1.2.  If not, see <http://www.gnu.org/licenses/>.
 
 import sys, os
+import glob
 import numpy as np
-from Vector import Vector
+import Outils
+import itertools
+
 try :
     import ogr
 except :
     from osgeo import ogr
+
 from collections import *
+from Vector import Vector
 
 class Rpg(Vector):
     
     """
-    Vector class inherits the super vector class properties. This class create a new RPG shapefile
-    with mono-crops. It needs a basic RPG shapefile and a basic RPG CSV file *(...-GROUPES-CULTURE...)* in :func:`mono_rpg`.
-    
-    :param vector_used: Input/Output shapefile to clip (path)
-    :type vector_used: str
-    :param vector_cut: Area shapefile (path)
-    :type vector_cut: str
-    :param rm_dupli: Rpg table with no duplicated crops group
-    :type rm_dupli: dict
-    :param head_in_read: List of rgp header
-    :type head_in_read: list of str
-    :param min_size: Minimum size to extract a rpg polygons
-    :type min_size: float
+        Vector class inherits the super vector class properties. 
+        This class creates a new RPG shapefile with mono-crops.
+        It needs a basic RPG shapefile and a basic RPG CSV file *(...-GROUPES-CULTURE...)* in :func:`mono_rpg`.
+        
+        @param vector_used: Input/Output shapefile to clip (path)
+        @type vector_used: str
+
+        @param vector_cut: Area shapefile (path)
+        @type vector_cut: str
+
+        @param rm_dupli: Rpg table with no duplicated crops group
+        @type rm_dupli: dict
+
+        @param head_in_read: List of rpg headers
+        @type head_in_read: list of str
+
+        @param min_size: Minimum size to extract rpg polygons
+        @type min_size: float
     """
     
     def __init__(self, used, cut):
-        """Create a new 'Rpg' instance
         """
+            Create a new 'Rpg' instance
+        """
+
+        self.logger = Outils.Log("Log", "Rpg")
+
         Vector.__init__(self, used, cut)
         
         self.rm_dupli = defaultdict(list)
@@ -55,12 +69,12 @@ class Rpg(Vector):
     
     def create_new_rpg_files(self):
         """
-        Function to create new rpg shapefile with **rm_dpli** variable. The output shapefile
-        will be create in the same folder than the input shapefile with prefix *MONO_*.
-
+            Function to create a new rpg shapefile from the **rm_dupli** variable. The output shapefile
+            will be created in the same folder as the input shapefile, with the prefix *MONO_*.
         """
+
         ## The output shapefile if it exists
-        self.vector_used = os.path.split(self.vector_used)[0] + '/' + 'MONO_' + os.path.split(self.vector_used)[1].split('-')[2]
+        self.vector_used = "{0}/MONO_{1}".format(self.vector_folder, self.vector_name)
 
         if not os.path.exists(self.vector_used):
             
@@ -73,37 +87,36 @@ class Rpg(Vector):
             srsObj.MorphToESRI()
             
             # Create output file
-            out_ds = self.data_source.GetDriver().CreateDataSource(self.vector_used)
-            
-            if out_ds is None:
-                print('Could not create file')
-                sys.exit(1)
+
+            try:
+                out_ds = self.data_source.GetDriver().CreateDataSource(self.vector_used)
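+                # If CreateDataSource fails and returns None, the CreateLayer call below raises and the except clause reports it.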
+
+                # Specific output layer
+                out_layer = out_ds.CreateLayer(str(self.vector_used), srsObj, geom_type=ogr.wkbMultiPolygon)
                 
-            #  Specific output layer
-            out_layer = out_ds.CreateLayer(str(self.vector_used), srsObj, geom_type=ogr.wkbMultiPolygon)
-            
-            # Add existing fields 
-            for i in range(0, len(self.head_in_read)):
-                fieldDefn = ogr.FieldDefn(self.head_in_read[i], ogr.OFTString)
-                out_layer.CreateField(fieldDefn)
-            
-            # Feature for the ouput shapefile
-            featureDefn = out_layer.GetLayerDefn()
-            
-            in_feature = shp_ogr.SetNextByIndex(0) # Polygons initialisation
-            in_feature = shp_ogr.GetNextFeature()
-            # Loop on input polygons to create a output polygons
-            while in_feature:
+                # Add existing fields 
+                for i in range(0, len(self.head_in_read)):
+                    fieldDefn = ogr.FieldDefn(self.head_in_read[i], ogr.OFTString)
+                    out_layer.CreateField(fieldDefn)
+                
+                # Feature for the output shapefile
+                featureDefn = out_layer.GetLayerDefn()
                 
-                id_rpg = str(in_feature.GetField(self.field_names[0]))
-                # Create a existing polygons in modified rpg list  
-                # with minimum size greater than or equal to 1 ha
-                try:
+                in_feature = shp_ogr.SetNextByIndex(0) # Polygons initialisation
+                in_feature = shp_ogr.GetNextFeature()
+
+                # Loop on input polygons to create a output polygons
+                while in_feature:
+                    
+                    id_rpg = str(in_feature.GetField(self.field_names[0]))
+                    # Keep existing polygons from the modified rpg list
+                    # with minimum size greater than or equal to 1 ha
+
                     if self.rm_dupli[id_rpg] and float(self.rm_dupli[id_rpg][2].replace(',','.')) >= self.min_size:
                         # Add .replace(',','.') if the input RPG contains comma instead of point
-                    
+                        
                         geom = in_feature.GetGeometryRef() # Extract input geometry
-            
+                
                         # Create a new polygon
                         out_feature = ogr.Feature(featureDefn)
                     
@@ -118,31 +131,35 @@ class Rpg(Vector):
                         # Destroy polygons
                         out_feature.Destroy()    
                         in_feature.Destroy()
-                except:
-                    pass
+                    
+                    # Next polygon
+                    in_feature = shp_ogr.GetNextFeature()
                 
-                # Next polygon
-                in_feature = shp_ogr.GetNextFeature()
-            
-            # Close data
-            out_ds.Destroy()  
+                # Close data
+                out_ds.Destroy() 
+
+            except Exception as e:
+                self.logger.error('Error while creating file : {0}'.format(e))
+                sys.exit(1)           
+                 
        
     def mono_rpg(self):        
         """
-        Function to extract no duplicated crops.
-        
+            Function to extract non-duplicated crops.
         """
         
         # Table from the RPG input shapefile name
         file_tab = os.path.split(self.vector_used)[1].split('-')[len(os.path.split(self.vector_used)[1].split('-'))-1].split('_')
+
         # Define the input csv file from RPG input shapefile
         myfile = os.path.split(self.vector_used)[0] + '/' + file_tab[0] + '-' + file_tab[1] + '-GROUPES-CULTURE_' + file_tab[2] +\
                                                                 '_' + file_tab[3][:-3] + 'csv'
-        my_file = open(myfile, "r")
 
         in_read = []
-        for l in my_file.readlines(): 
-            in_read.append(l.split("\n")[0].split(";"))
+
+        with open(myfile, 'r') as my_file:
+            for l in my_file.readlines(): 
+                in_read.append(l.split("\n")[0].split(";"))
         
         # Fields name
         for y in in_read[0]:
@@ -150,11 +167,8 @@ class Rpg(Vector):
                 self.head_in_read.append(y)
             else:
                 self.head_in_read.append(y[:10])
+    
         body_in_read = list(map(list, zip(*in_read[1:]))) # Transpose table [[e,e,e],[a,a,a]] -> [[e,a],[e,a],[e,a]]
-        
-#         self.rm_dupli = [[x, body_in_read[1][body_in_read[0].index(x)], body_in_read[2][body_in_read[0].index(x)]] \
-#                                 for x in body_in_read[0] if body_in_read[0].count(x) == 1]
         for x in body_in_read[0]:
             if body_in_read[0].count(x) == 1:
                 self.rm_dupli[x] = [x, body_in_read[1][body_in_read[0].index(x)], body_in_read[2][body_in_read[0].index(x)]]
-                
\ No newline at end of file
diff --git a/Sample.py b/Sample.py
index 91ed7bc595b74c684b89ff2f7c8ffe215dca486a..c7125468742f09629b959ba77e726b39d6e54c44 100644
--- a/Sample.py
+++ b/Sample.py
@@ -20,7 +20,10 @@
 import sys, os
 import random
 import numpy as np
+
 from Vector import Vector
+import Outils
+
 try :
 	import ogr
 except :
@@ -47,11 +50,15 @@ class Sample(Vector):
 		"""
 			Create a new 'Sample' instance   
 		"""
+
+		self.logger = Outils.Log("Log", "Sample")
+
 		Vector.__init__(self, used, cut, **opt)
 		
 		self._nb_sample = nb_sample
 		self.vector_val = ''
 	
 	def create_sample(self, **kwargs):
 		"""
 			Function to create a sample shapefile of a specific class
@@ -163,7 +170,7 @@ class Sample(Vector):
 		out_ds = self.data_source.GetDriver().CreateDataSource(output_sample)
 		
 		if out_ds is None:
-			print('Could not create file')
+			self.logger.error('Could not create file')
 			sys.exit(1)
 			
 		#  Specific output layer
diff --git a/Seath.py b/Seath.py
index 1894da85cddab53d80383cdf33467f45a6d3367d..fccf80552b7811a0c1f15429ebc91f88acade9f7 100644
--- a/Seath.py
+++ b/Seath.py
@@ -18,6 +18,7 @@
 # along with PHYMOBAT 2.0.  If not, see <http://www.gnu.org/licenses/>.
 
 import numpy, math, sys
+import Outils
 import numpy as np # sort, ...
 from collections import * # defaultdict
 
@@ -57,6 +58,8 @@ class Seath():
         
         self.value_1 = []
         self.value_2 = []
+
+        self.logger = Outils.Log("Log", 'Seath')
         
         self.threshold = []
         self.J = []
@@ -100,8 +103,8 @@ class Seath():
             v[mark].append(np.var(C[mark]))
             p[mark].append(1 / float(len(C[mark])))
         
-        print ("m : ", m)
-        print ("v : ", v)
+        self.logger.info("m : {0}".format(m))
+        self.logger.info("v : {0}".format(v))
         
         # Mean, standard deviation and likelihood initialisation phase for 2 classes 
         m1 = m[0]
@@ -126,12 +129,12 @@ class Seath():
             # Optimal threshold
             # Logical condition depending on article figure 2
             if ( seuil1[i] > m2[i] and seuil1[i] < m1[i] ) or ( seuil1[i] > m1[i] and seuil1[i] < m2[i] ) :
-                print ("Valid  threshold !")
+                self.logger.info("Valid threshold!")
             else:
                 seuil1[i] = ""
             
             if ( seuil2[i] > m2[i] and seuil2[i] < m1[i] ) or ( seuil2[i] > m1[i] and seuil2[i] < m2[i] ) :
-                print ("Valid  threshold !")
+                self.logger.info("Valid threshold!")
             else:
                 seuil2[i] = ""
         
@@ -143,20 +146,19 @@ class Seath():
             elif ( seuil1[i] == "" and seuil2[i] != "" ):
                 seuil.append(seuil2[i])
         
-        print("Bhattacharyya distance ", B)
-        print("J : ", self.J)
-        print("Threshold 1 : ", seuil1)
-        print("Threshold 2 : ", seuil2)
-        print("Optimal threshold :", seuil)
+        self.logger.info("Bhattacharyya distance : {0}".format(B))
+        self.logger.info("J : {0}".format(self.J))
+        self.logger.info("Threshold 1 : {0}".format(seuil1))
+        self.logger.info("Threshold 2 : {0}".format(seuil2))
+        self.logger.info("Optimal threshold : {0}".format(seuil))
         
         for i in range(len(seuil)):
             if seuil[i] != "" and m1[i] > m2[i]:
-                print('For ' + ind  + ', the class 1 > ' + str(seuil[i]))
+                self.logger.info('For ' + ind  + ', the class 1 > ' + str(seuil[i]))
                 self.threshold.append('>' + str(seuil[i]))
             elif seuil[i] != "" and m1[i] < m2[i]:
-                print('For ' + ind  + ', the class 1 < ' + str(seuil[i]))
+                self.logger.info('For ' + ind  + ', the class 1 < ' + str(seuil[i]))
                 self.threshold.append('<' + str(seuil[i]))
             else:
-                print('For ' + ind  + ', not discrimination !')
+                self.logger.error('For ' + ind + ', no discrimination!')
                 sys.exit(1)
-#                 self.threshold.append('')
\ No newline at end of file
diff --git a/Segmentation.py b/Segmentation.py
index 18c3d8007a792f1e12ee6b0ed01fd754581d43d0..c95ed4acd972417bf6719c2f9b4fda6fabfbcffc 100644
--- a/Segmentation.py
+++ b/Segmentation.py
@@ -20,53 +20,59 @@
 import sys, os, math
 import numpy as np
 from Vector import Vector
+import Outils
+
 try :
     import ogr, gdal
 except :
     from osgeo import ogr, gdal
+
 from collections import *
 
 class Segmentation(Vector):
     
     """
-    Vector class inherits the super vector class properties. This class create the final shapefile : Cartography 
-    on a input segmentation by decision tree.
-    
-    The output classname are (**out_class_name** variable):
-        - Vegetation non naturelle
-        - Vegetation semi-naturelle
-        - Herbacees
-        - Ligneux
-        - Ligneux mixtes
-        - Ligneux denses
-        - Forte phytomasse
-        - Moyenne phytomasse
-        - Faible phytomasse
-    
-    :param vector_used: Input/Output shapefile to clip (path)
-    :type vector_used: str
-    :param vector_cut: Area shapefile (path)
-    :type vector_cut: str
-    :param output_file: Output shapefile cartography. This path is the same than the segmentation path.
-    :type output_file: str
-    :param out_class_name: List of output class name
-    :type out_class_name: list of str
-    :param out_threshold: List of output threshold
-    :type out_threshold: list of str
-    :param max_...: Biomass and density (IDM, SFS index) maximum
-    :type max_...: float
-    :param class_tab_final: Final decision tree table
-    :type class_tab_final: dict
-    :param stats_rpg_tif: Dictionnary with pxl percent in every polygon of class RPG. Mainly 'Maj_count' (Majority Value) and 'Maj_count_perc' (Majority Percent)
-    :type stats_rpg_tif: dict
+        Vector class inherits the super vector class properties. This class creates the final shapefile (cartography)
+        from an input segmentation by decision tree.
+        
+        The output class names are (**out_class_name** variable):
+            - Vegetation non naturelle
+            - Vegetation semi-naturelle
+            - Herbacees
+            - Ligneux
+            - Ligneux mixtes
+            - Ligneux denses
+            - Forte phytomasse
+            - Moyenne phytomasse
+            - Faible phytomasse
+        
+        :param vector_used: Input/Output shapefile to clip (path)
+        :type vector_used: str
+        :param vector_cut: Area shapefile (path)
+        :type vector_cut: str
+        :param output_file: Output shapefile cartography. This path is the same as the segmentation path.
+        :type output_file: str
+        :param out_class_name: List of output class name
+        :type out_class_name: list of str
+        :param out_threshold: List of output threshold
+        :type out_threshold: list of str
+        :param max_...: Biomass and density (IDM, SFS index) maximum
+        :type max_...: float
+        :param class_tab_final: Final decision tree table
+        :type class_tab_final: dict
+        :param stats_rpg_tif: Dictionary with pixel percentages in every polygon of class RPG. Mainly 'Maj_count' (Majority Value) and 'Maj_count_perc' (Majority Percent)
+        :type stats_rpg_tif: dict
     """
     
     def __init__(self, used, cut):
-        """Create a new 'Clustering' instance
-        
         """
+            Create a new 'Segmentation' instance
+        """
+
+        self.logger = Outils.Log("Log", "Segmentation")
+
         Vector.__init__(self, used, cut)
-        
+       
         self.stats_rpg_tif = {}
         self.output_file =  'Final_classification.shp'
         
@@ -107,7 +113,7 @@ class Segmentation(Vector):
         out_ds = self.data_source.GetDriver().CreateDataSource(self.vector_used)
         
         if out_ds is None:
-            print('Could not create file')
+            self.logger.error('Could not create file')
             sys.exit(1)
             
         #  Specific output layer
@@ -257,7 +263,7 @@ class Segmentation(Vector):
         :type method: str
         """
         
-        print("Calcul densité : ")
+        self.logger.info("Density computation:")
         if method == 'SEATH':
             distri = [v[1:] for k, v in self.stats_dict.items() if eval('v[0]' + self.out_threshold[1])]
             
@@ -274,6 +280,7 @@ class Segmentation(Vector):
             
             distri_bio = []
             distri_den = []
+
             for b in distri:
                 if self.out_threshold[distri.index(b)] in [1,2,8,9,10] and b[len(b)-1] != -10000 and b[len(b)-1] < 1:
                     distri_bio.append(b)
@@ -287,31 +294,22 @@ class Segmentation(Vector):
         t_distri_bio = list(map(list, zip(*distri_bio)))
         t_distri_den = list(map(list, zip(*distri_den)))
        
-        try:
-            # Biomass threshold
-            stdmore =  (np.mean(t_distri_bio[2]) + np.std(t_distri_bio[2]))/np.max(t_distri_bio[2])
-            stdless =  (np.mean(t_distri_bio[2]) - np.std(t_distri_bio[2]))/np.max(t_distri_bio[2])
-            self.out_threshold.append('>'+str(stdmore))
-            self.out_threshold.append('')
-            self.out_threshold.append('<'+str(stdless))
-            
-        except Exception as e:
-            print("Bio : ", t_distri_bio)
-            print("Den : ", t_distri_den)
-
-        try:            
-            # Compute density and biomass maximum
-            self.max_wood_idm = np.max(t_distri_den[1])
-            self.max_wood_sfs = np.max(t_distri_den[0])
-            self.max_bio = np.max(t_distri_bio[2])
-        except Exception as e:
-            print(e)
- 
+        # Biomass threshold
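+        # Assumes t_distri_bio and t_distri_den are non-empty: the former try/except guards were removed.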
+        stdmore =  (np.mean(t_distri_bio[2]) + np.std(t_distri_bio[2]))/np.max(t_distri_bio[2])
+        stdless =  (np.mean(t_distri_bio[2]) - np.std(t_distri_bio[2]))/np.max(t_distri_bio[2])
+        self.out_threshold.append('>'+str(stdmore))
+        self.out_threshold.append('')
+        self.out_threshold.append('<'+str(stdless))
+        
+        # Compute density and biomass maximum
+        self.max_wood_idm = np.max(t_distri_den[1])
+        self.max_wood_sfs = np.max(t_distri_den[0])
+        self.max_bio = np.max(t_distri_bio[2])
         
     def append_scale(self, select_class, form):
         """
-        Function to complete the 'class_tab_final' list with density and biomass information. This list will be used to build
-        the final shapefile.
+        Function to complete the 'class_tab_final' list with density and biomass information.
+        This list will be used to build the final shapefile.
         
         :param select_class: Class name to add degree
         :type select_class: str
@@ -319,7 +317,7 @@ class Segmentation(Vector):
         :type form: str
         
         """
-        print(self.max_wood_idm)
+
         for ind_stats in range(len(self.stats_dict)):
             # Only valid on the second level
             try:
diff --git a/Slope.py b/Slope.py
index b96bf47150a8d6ff3fafc08c0dab0836da1d64c7..2325ce77c3bb7877569dcfe5675eca805da3a912 100644
--- a/Slope.py
+++ b/Slope.py
@@ -19,6 +19,7 @@
 
 import os
 import subprocess
+import Outils
 
 class Slope():
     
@@ -39,6 +40,7 @@ class Slope():
         
         self.mnt = mnt
         self.out_mnt = ''
+        self.logger = Outils.Log("Log", "Slope")
         
     def extract_slope(self):
         
@@ -50,6 +52,6 @@ class Slope():
             # Launch gdaldem command line to have slope in degrees
             process_tocall = ['gdaldem', 'slope', self.mnt, self.out_mnt]
     
-            print(process_tocall)
+            self.logger.info(process_tocall)
             subprocess.call(process_tocall)
             
diff --git a/Toolbox.py b/Toolbox.py
index 3ef37903cdfbce532851c822918fd7dd3700418f..da50af53cf5f25a252e04f5227c30a399589fb5a 100644
--- a/Toolbox.py
+++ b/Toolbox.py
@@ -17,17 +17,20 @@
 # You should have received a copy of the GNU General Public License
 # along with PHYMOBAT 1.2.  If not, see <http://www.gnu.org/licenses/>.
 
+from datetime import date
+
 import os, sys, subprocess, glob
 import numpy as np
-from datetime import date
 import json
+import Outils
 
-class Toolbox():
+class Toolbox(object):
     """
         Class used to grouped small tools to cut raster or compute statistics on a raster. 
             
-        :param imag: Input image (path)
-        :type imag: str
+        :param image: Input image (path)
+        :type image: str
+        
         :param vect: Extent shapefile (path)
         :type vect: str
     """
@@ -36,25 +39,27 @@ class Toolbox():
         """
             Create a new 'Toolbox' instance
         """
-        self.imag = ''
+        self.image = ''
         self.vect = ''
+        self.logger = Outils.Log("Log", "Toolbox")
     
     def clip_raster(self, **kwargs):
         """
-            Function to clip a raster with a vector. The raster created will be in the same folder than the input raster.
-            With a prefix *Clip_*.
+            Function to clip a raster with a vector. 
+            The created raster will be in the same folder as the input raster,
+            with the prefix 'Clip_'.
             
             :kwargs: **rm_rast** (int) - 0 (by default) or 1. Variable to remove the output raster. 0 to keep and 1 to remove.
             
             :returns: str -- variable **outclip**, output raster clip (path).
-        """    
+        """
         
         rm_rast = kwargs['rm_rast'] if kwargs.get('rm_rast') else 0
-        outclip = os.path.split(str(self.imag))[0] + '/Clip_' + os.path.split(str(self.imag))[1]
+        outclip = os.path.split(str(self.image))[0] + '/Clip_' + os.path.split(str(self.image))[1]
         if not os.path.exists(outclip) or rm_rast == 1:
-            print ('Raster clip of ' + os.path.split(str(self.imag))[1])
+            self.logger.info("Raster clip of {0}".format(os.path.split(str(self.image))[1]))
             # Command to clip a raster with a shapefile by Gdal
-            process_tocall_clip = ['gdalwarp', '-overwrite', '-dstnodata', '-10000', '-q', '-cutline', self.vect, '-crop_to_cutline', '-of', 'GTiff', self.imag, outclip]
+            process_tocall_clip = ['gdalwarp', '-overwrite', '-dstnodata', '-10000', '-q', '-cutline', self.vect, '-crop_to_cutline', '-of', 'GTiff', self.image, outclip]
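+            # Equivalent shell command, for reference (hypothetical paths) :
+            #     gdalwarp -overwrite -dstnodata -10000 -q -cutline extent.shp -crop_to_cutline -of GTiff image.tif Clip_image.tif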
             # This is a trick to remove warning with the polygons that touch themselves
             try:
                 r = subprocess.call(process_tocall_clip)
@@ -62,14 +67,16 @@ class Toolbox():
                     sys.exit(1)
                     
             except SystemExit:
-                print("Dissolve vector cut of the validation !!!")
+                self.logger.info("Dissolve vector cut of the validation !")
+                
                 vect_2 = self.vect[:-4] + '_v2.shp'
                 preprocess_tocall = 'ogr2ogr -overwrite ' + vect_2 + ' ' + self.vect + ' -dialect sqlite -sql "SELECT ST_Union(geometry), * FROM ' + \
                                         os.path.basename(self.vect)[:-4] +'"'
                 os.system(preprocess_tocall)
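+                # ST_Union merges every feature into a single geometry (SpatiaLite dialect),
+                # which avoids the gdalwarp warnings raised by polygons that touch themselves.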
-                print ('Raster clip of ' + os.path.split(str(self.imag))[1])
-                process_tocall_clip = ['gdalwarp', '-overwrite', '-dstnodata', '-10000', '-q', '-cutline', vect_2, '-crop_to_cutline', '-of', 'GTiff', self.imag, outclip]
+                self.logger.info("Raster clip of {0}".format(os.path.split(str(self.image))[1]))
+                process_tocall_clip = ['gdalwarp', '-overwrite', '-dstnodata', '-10000', '-q', '-cutline', vect_2, '-crop_to_cutline', '-of', 'GTiff', self.image, outclip]
                 subprocess.call(process_tocall_clip)
+                
                 for rem in glob.glob(vect_2[:-4] + '*'):
                     os.remove(rem)
                     
@@ -100,9 +107,9 @@ class Toolbox():
         # For the ndvi stats
         # In the input table the ndvi floor is the 7th
         stack_ndvi = np.dstack(table[7]) # Like cloud table, stack ndvi table
-    #     mask_ndvi = np.ma.masked_equal(stack_ndvi, -10000, copy=True) # Mask values -10000
+        # mask_ndvi = np.ma.masked_equal(stack_ndvi, -10000, copy=True) # Mask values -10000
         mask_cloud = np.ma.masked_where((stack_ndvi == -10000) | (tab_cloud != 0), tab_cloud) # Mask values -10000 and > 0(Not cloud)
-    #    mask_cloud = (tab_cloud != 0) | (stack_ndvi == -10000)
+        # mask_cloud = (tab_cloud != 0) | (stack_ndvi == -10000)
         tab_ndvi_masked = np.ma.array(stack_ndvi, mask=mask_cloud.mask) # mask_cloud.mask) # Ndvi table with clear values 
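+        # Toy sketch of the masking rule above, standalone and with hypothetical values :
+        #     ndvi  = np.dstack([np.array([[0.5, -10000.]])])
+        #     cloud = np.dstack([np.array([[0., 1.]])])
+        #     mask  = np.ma.masked_where((ndvi == -10000) | (cloud != 0), cloud)
+        #     np.ma.array(ndvi, mask=mask.mask)   # only the first pixel stays clear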
         
         # Stats on the indexes defined above
@@ -136,14 +143,14 @@ class Toolbox():
         epsg_phymobat = '2154'
         proj_phymobat = 'AUTHORITY["EPSG","' + epsg_phymobat + '"]'
 
-        info_gdal = 'gdalinfo -json ' + self.imag
+        info_gdal = 'gdalinfo -json ' + self.image
         info_json = json.loads(subprocess.check_output(info_gdal, shell=True))
         # Check if projection is in Lambert 93
         if not proj_phymobat in info_json['coordinateSystem']['wkt']:
-            output_proj = self.imag[:-4] + '_L93.tif'
-            reproj = 'gdalwarp -t_srs EPSG:' + epsg_phymobat + ' ' + self.imag + ' ' + output_proj
+            output_proj = self.image[:-4] + '_L93.tif'
+            reproj = 'gdalwarp -t_srs EPSG:' + epsg_phymobat + ' ' + self.image + ' ' + output_proj
             os.system(reproj)
             # Remove old file and rename new file like the old file
-            os.remove(self.imag)
-            os.rename(output_proj, self.imag)
+            os.remove(self.image)
+            os.rename(output_proj, self.image)
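+            # Equivalent shell commands, for reference (hypothetical paths) :
+            #     gdalinfo -json image.tif
+            #     gdalwarp -t_srs EPSG:2154 image.tif image_L93.tif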
         
\ No newline at end of file
diff --git a/Vector.py b/Vector.py
index c97590cea85cb9159a2b409b9fd68c9075e9a736..cdc08736a88ea61e2b7367114f8c0d5a9165eede 100644
--- a/Vector.py
+++ b/Vector.py
@@ -20,208 +20,209 @@
 import os, sys
 import subprocess
 import numpy as np
+
 try :
-    import ogr, gdal
+	import ogr, gdal
 except :
-    from osgeo import ogr, gdal
-    
-# from rasterstats import *
+	from osgeo import ogr, gdal
+	
 import rasterstats
 from collections import *
-
+import Outils
 from RasterSat_by_date import RasterSat_by_date
 
 class Vector():
-    """    
-    Vector class to extract a area, vector data and zonal statistic (``rasterstats 0.3.2 package``)
-    
-    :param vector_used: Input/Output shapefile to clip (path)
-    :type vector_used: str
-    :param vector_cut: Area shapefile (path)
-    :type vector_cut: str
-    :param data_source: Input shapefile information
-    :type data_source: ogr pointer
-    :param stats_dict: ``Rasterstats`` results
-    :type stats_dict: dict
-    :param raster_ds: Raster information for a probably rasterization
-    :type raster_ds: gdal pointer
-    :param remove_shp: Remove shapefile or not. 0 : don't remove, 1 : remove
-    :type remove_shp: int
-    :opt: **Remove** (int) - For the remove_shp variable
-    
-    """
-      
-    def __init__(self, used, cut, **opt):
-        """Create a new 'Vector' instance
-               
-        """
-        self.vector_cut = cut
-        self.vector_used = used
-        self.remove_shp = opt['Remove'] if opt.get('Remove') else 0
-
-        self.clip_vector()
-        
-        self.data_source = ''
-        if self.data_source == '':
-            self.vector_data()
-            
-        # List of field name
-        self.field_names = [self.data_source.GetLayer().GetLayerDefn().GetFieldDefn(l).GetName() \
-                       for l in range(self.data_source.GetLayer().GetLayerDefn().GetFieldCount())]
-            
-        self.stats_dict = defaultdict(list)
-        self.raster_ds = None
-        
-        
-    
-    def clip_vector(self):
-        """
-        Function to clip a vector with a vector
-        
-        """    
-        
-        outclip = os.path.split(self.vector_used)[0] + '/Clip_' + os.path.split(self.vector_used)[1]
-        if not os.path.exists(outclip) or self.remove_shp == 1:
-            print ('Clip of ' + os.path.split(self.vector_used)[1])
-            # Command to clip a vector with a shapefile by OGR
-            process_tocall_clip =  ['ogr2ogr', '-overwrite', '-skipfailures', outclip, self.vector_used, '-clipsrc', self.vector_cut]
-            subprocess.call(process_tocall_clip)
-        
-        # Replace input filename by output filename
-        self.vector_used = outclip
-        
-    def vector_data(self):
-        """
-        Function to extract vector layer information 
-        
-        """  
-        
-        # import ogr variable
-        self.data_source = ogr.GetDriverByName('ESRI Shapefile').Open(self.vector_used, 0)
-        
-        if self.data_source is None:
-            print('Could not open file')
-            sys.exit(1)
-        
-        print('Shapefile opening : ' + self.data_source.GetLayer().GetName())
-
-    def close_data(self):
-        """
-        Function to remove allocate memory            
-        """        
-        
-        # Close data sources
-        self.data_source.Destroy()
-        
-        print('Shapefile closing : ' + self.data_source.GetLayer().GetName())
-    
-    def zonal_stats(self, inraster, band, **kwargs):
-        """
-        Function to compute the mean in every polygons for a raster
-        because of package ``rasterstats`` in */usr/local/lib/python2.7/dist-packages/rasterstats-0.3.2-py2.7.egg/rasterstats/*
-        
-        :param (inraster,band): inraster -> Input image path, and band -> band number
-        :type (inraster,band): tuple
-        :kwargs: **rank** (int) - Zonal stats ranking launch
-        
-                **nb_img** (int) - Number images launched with zonal stats 
-        """
-        
-        ranking = kwargs['rank'] if kwargs.get('rank') else 0
-        nb_img = kwargs['nb_img'] if kwargs.get('nb_img') else 1
-
-        print('Compute ' + os.path.split(self.vector_used)[1] + ' stats on ' + os.path.split(inraster)[1])
-        stats = rasterstats.zonal_stats(self.vector_used, inraster,  stats =['mean'], nodata=float('nan'), band=band)
-        for i in range(len(stats)):
-            temp = defaultdict(lambda : [0]*nb_img)
-            for j in range(nb_img):
-                try :
-                    temp[0][j] = self.stats_dict[i][j]
-                except IndexError:
-                    pass
-
-            # temp[0][ranking] = stats[i].values()[1]
-            temp[0][ranking] = list(stats)[i]['mean']
-            self.stats_dict[i] = temp[0]
-            
-        print('End of stats on ' + os.path.split(str(inraster))[1])
-
-    def zonal_stats_pp(self, inraster):
-        """
-          A zonal statistics ++ to dertermine pxl percent in every polygon
-        
-          :param inraster: Input image path
-          :type inraster: str 
-          :returns: dict -- **p_stats** : dictionnary with pxl percent in every polygon. Mainly 'Maj_count' (Majority Value) and 'Maj_count_perc' (Majority Percent)
-        
-        """
-        
-        p_stats = rasterstats.zonal_stats(self.vector_used, inraster, stats=['count'], copy_properties=True, categorical=True)
-        
-        for i in range(len(p_stats)):
-            percent = 0.0
-            for p in p_stats[i].keys():
-                if type(p) == np.float32 and p_stats[i][p]/float(p_stats[i]['count'])*100 > percent:
-                    p_stats[i]['Maj_count'] = p
-                    p_stats[i]['Maj_count_perc'] = p_stats[i][p]/float(p_stats[i]['count'])*100
-                    percent = p_stats[i]['Maj_count_perc']
-            if not 'Maj_count' in p_stats[i].keys() or not 'Maj_count_perc' in p_stats[i].keys():
-                p_stats[i]['Maj_count']=0
-                p_stats[i]['Maj_count_perc']=0
-        
-        return p_stats
-                
-    def layer_rasterization(self, raster_head, attribute_r, **kwargs):
-        """
-        Function to rasterize a vector. Define resolution, projection of the output raster with a raster head.
-        And complete the gdal pointer empty properties with the layer's information of the vector and a defined field.
-        If a raster has several band, in option you can choice if you want one band or more. 
-        
-        :param raster_head: Raster path that will look like the final raster of the rasterization
-        :type raster_head: str
-        :param attribute_r: Field name of the shapefile that contains class names
-        :type attribute_r: str
-        :kwargs: **choice_nb_b** (int) - Output image number of band. If you choice 1, take first band. If you choice 2, take two first band etc... 
-        :returns: str -- **valid_raster** : output raster path from the rasterization
-        
-        """
-        
-        # Export a example of a raster out information
-        # for the validation shapefile
-        example_raster = RasterSat_by_date('', '', [0]) # Call the raster class
-        example_raster.choice_nb_b = kwargs['choice_nb_b'] if kwargs.get('choice_nb_b') else 0
-        raster_info = example_raster.raster_data(raster_head)# Extract data info
-        
-        # Define the validation's vector
-        valid_raster = self.vector_used[:-3]+'TIF' # Name of the output raster
-        if os.path.exists(str(valid_raster)):
-            os.remove(valid_raster)
-            
-        # Create the empty raster with the same properties
-        # Condition for the rasters with several bands
-        if raster_info[1].RasterCount > 1:
-            data_raster = raster_info[0][0]
-        else:
-            data_raster = raster_info[0]
-        info_out = example_raster.create_raster(valid_raster, data_raster, raster_info[1])
-        self.raster_ds = example_raster.out_ds
-        
-        # Virtual rasterize the vector 
-        pt_rast = gdal.RasterizeLayer(self.raster_ds, [1], self.data_source.GetLayer(), \
-                                      options=["ATTRIBUTE=" + str(attribute_r)])
-        
-        if pt_rast != 0:
-            raise Exception("error rasterizing layer: %s" % pt_rast)
-        
-        new_data = self.raster_ds.ReadAsArray()
-        
-        self.raster_ds = None
-        # Complete the raster creation
-        example_raster.complete_raster(*info_out, new_data)
-        
-        return valid_raster
-        
-        
-        
-        
+	"""
+		Vector class to extract an area, vector data and zonal statistics (``rasterstats 0.3.2 package``)
+		
+		:param vector_used: Input/Output shapefile to clip (path)
+		:type vector_used: str
+
+		:param vector_cut: Area shapefile (path)
+		:type vector_cut: str
+
+		:param vector_name: Name of the shapefile
+		:type vector_name: str
+
+		:param vector_folder: Name of the folder containing the shapefile
+		:type vector_folder: str
+
+		:param data_source: Input shapefile information
+		:type data_source: ogr pointer
+
+		:param stats_dict: ``Rasterstats`` results
+		:type stats_dict: dict
+
+		:param raster_ds: Raster information for a possible rasterization
+		:type raster_ds: gdal pointer
+
+		:param remove_shp: Remove shapefile or not. 0 : don't remove, 1 : remove
+		:type remove_shp: int
+
+		:opt: **Remove** (int) - For the remove_shp variable
+	"""
+
+	def __init__(self, used, cut, **opt):
+		"""
+			Create a new 'Vector' instance   
+		"""
+
+		self.logger = Outils.Log("Log", 'Vector')
+
+		self.vector_name = os.path.basename(used)
+		self.vector_folder = os.path.dirname(used)
+
+		self.vector_cut = cut
+		self.vector_used = used
+		self.remove_shp = opt['Remove'] if opt.get('Remove') else 0
+
+		self.clip_vector()
+
+		self.vector_data()
+	
+		# List of field name
+		self.field_names = [self.data_source.GetLayer().GetLayerDefn().GetFieldDefn(l).GetName() \
+					   for l in range(self.data_source.GetLayer().GetLayerDefn().GetFieldCount())]
+			
+		self.stats_dict = defaultdict(list)
+		self.raster_ds = None
+	
+	def clip_vector(self):
+		"""
+			Function to clip a vector with another vector
+		"""
+	
+		outclip = "{0}/Clip_{1}".format(self.vector_folder, self.vector_name)
+
+		if not os.path.exists(outclip) or self.remove_shp == 1:
+			self.logger.info('Clip of {0}'.format(self.vector_name))
+			# Command to clip a vector with a shapefile by OGR
+			process_tocall_clip = ['ogr2ogr', '-overwrite', '-skipfailures', outclip, self.vector_used, '-clipsrc', self.vector_cut]
+			subprocess.call(process_tocall_clip)
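+			# Equivalent shell command, for reference (hypothetical paths) :
+			#     ogr2ogr -overwrite -skipfailures Clip_layer.shp layer.shp -clipsrc extent.shp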
+		
+		# Replace input filename by output filename
+		self.vector_used = outclip
+		
+	def vector_data(self):
+		"""
+			Function to extract vector layer information 
+		"""  
+		self.data_source = ogr.GetDriverByName('ESRI Shapefile').Open(self.vector_used, 0)
+
+		# ogr returns None instead of raising an exception when the file cannot be opened
+		if self.data_source is None:
+			self.logger.error('Could not open file')
+			sys.exit(1)
+
+		self.logger.info('Shapefile opening : {0}'.format(self.data_source.GetLayer().GetName()))
+
+	def close_data(self):
+		"""
+			Function to release allocated memory
+		"""
+		
+		self.logger.info('Shapefile closing : {0}'.format(self.data_source.GetLayer().GetName()))
+
+		# Close the data source ; the layer name is logged above because it is
+		# no longer reachable once Destroy() has been called
+		self.data_source.Destroy()
+	
+	def zonal_stats(self, inraster, band, **kwargs):
+		"""
+			Function to compute the mean of a raster band in every polygon,
+			using the ``rasterstats`` package in */usr/local/lib/python2.7/dist-packages/rasterstats-0.3.2-py2.7.egg/rasterstats/*
+			
+			:param (inraster,band): inraster -> Input image path, and band -> band number
+			:type (inraster,band): tuple
+			:kwargs: **rank** (int) - Zonal stats ranking launch
+			
+					**nb_img** (int) - Number of images launched with zonal stats
+		"""
+		
+		ranking = kwargs['rank'] if kwargs.get('rank') else 0
+		nb_img = kwargs['nb_img'] if kwargs.get('nb_img') else 1
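+		# Each polygon keeps one mean per image : the value list has nb_img slots
+		# and each call fills the slot given by 'rank' (e.g., with nb_img=3,
+		# polygon i ends up with [mean_img0, mean_img1, mean_img2]).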
+
+		self.logger.info('Compute {0} stats on {1}'.format(os.path.split(self.vector_used)[1], os.path.split(inraster)[1]))
+		stats = rasterstats.zonal_stats(self.vector_used, inraster, stats=['mean'], nodata=float('nan'), band=band)
+		for i in range(len(stats)):
+			temp = defaultdict(lambda : [0]*nb_img)
+			for j in range(nb_img):
+				try :
+					temp[0][j] = self.stats_dict[i][j]
+				except IndexError:
+					pass
+
+			temp[0][ranking] = list(stats)[i]['mean']
+			self.stats_dict[i] = temp[0]
+			
+		self.logger.info('End of stats on {0}'.format(os.path.split(str(inraster))[1]))
 
+	def zonal_stats_pp(self, inraster):
+		"""
+			A zonal statistics ++ to determine the pixel percentage in every polygon
+		
+			:param inraster: Input image path
+			:type inraster: str
+			:returns: dict -- **p_stats** : dictionary with the pixel percentage in every polygon. Mainly 'Maj_count' (Majority Value) and 'Maj_count_perc' (Majority Percent)
+		"""
+		
+		p_stats = rasterstats.zonal_stats(self.vector_used, inraster, stats=['count'], copy_properties=True, categorical=True)
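+		# Sketch of the majority rule below, with hypothetical counts : a polygon with
+		# categories {1.0: 8, 2.0: 2} and 'count': 10 ends up with Maj_count = 1.0
+		# and Maj_count_perc = 80.0.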
+		
+		for i in range(len(p_stats)):
+			percent = 0.0
+			for p in p_stats[i].keys():
+				if type(p) == np.float32 and p_stats[i][p]/float(p_stats[i]['count'])*100 > percent:
+					p_stats[i]['Maj_count'] = p
+					p_stats[i]['Maj_count_perc'] = p_stats[i][p]/float(p_stats[i]['count'])*100
+					percent = p_stats[i]['Maj_count_perc']
+			if not 'Maj_count' in p_stats[i].keys() or not 'Maj_count_perc' in p_stats[i].keys():
+				p_stats[i]['Maj_count']=0
+				p_stats[i]['Maj_count_perc']=0
+		
+		return p_stats
+				
+	def layer_rasterization(self, raster_head, attribute_r, **kwargs):
+		"""
+			Function to rasterize a vector. The resolution and projection of the output
+			raster are defined by a reference raster (raster_head), and the empty gdal
+			dataset is completed with the vector layer's information for a given field.
+			If the reference raster has several bands, you can optionally choose how many bands to keep.
+			
+			:param raster_head: Raster path that the final rasterization output will look like
+			:type raster_head: str
+			:param attribute_r: Field name of the shapefile that contains class names
+			:type attribute_r: str
+			:kwargs: **choice_nb_b** (int) - Number of bands in the output image. Choose 1 to take the first band, 2 to take the first two bands, etc.
+			:returns: str -- **valid_raster** : output raster path from the rasterization
+		"""
+		
+		# Export an example of the output raster information
+		# for the validation shapefile
+		example_raster = RasterSat_by_date('', '', [0]) # Call the raster class
+		example_raster.choice_nb_b = kwargs['choice_nb_b'] if kwargs.get('choice_nb_b') else 0
+		raster_info = example_raster.raster_data(raster_head) # Extract data info
+		
+		# Define the validation's vector
+		valid_raster = self.vector_used[:-3]+'TIF' # Name of the output raster
+		if os.path.exists(str(valid_raster)):
+			os.remove(valid_raster)
+			
+		# Create the empty raster with the same properties
+		# Condition for the rasters with several bands
+		if raster_info[1].RasterCount > 1:
+			data_raster = raster_info[0][0]
+		else:
+			data_raster = raster_info[0]
+		info_out = example_raster.create_raster(valid_raster, data_raster, raster_info[1])
+		self.raster_ds = example_raster.out_ds
+		
+		# Rasterize the vector into the raster dataset
+		pt_rast = gdal.RasterizeLayer(self.raster_ds, [1], self.data_source.GetLayer(), \
+									  options=["ATTRIBUTE=" + str(attribute_r)])
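+		# The "ATTRIBUTE=<field>" option burns each polygon with the value of that
+		# field instead of a fixed burn value.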
+		
+		if pt_rast != 0:
+			raise Exception("error rasterizing layer: %s" % pt_rast)
+		
+		new_data = self.raster_ds.ReadAsArray()
+		
+		self.raster_ds = None
+		# Complete the raster creation
+		example_raster.complete_raster(*info_out, new_data)
+		
+		return valid_raster
\ No newline at end of file
diff --git a/Vhrs.py b/Vhrs.py
index b03c857f855157062e1fc9a68b7a6f57226c7500..5b02d673fcc93cd6d3e80583bf6495b4462cf964 100644
--- a/Vhrs.py
+++ b/Vhrs.py
@@ -19,6 +19,7 @@
 
 import os
 import subprocess
+import Outils
 from multiprocessing import Process
 
 class Vhrs():
@@ -44,21 +45,22 @@ class Vhrs():
         
         self._imag = imag
         self.mp = mp
+        self.logger = Outils.Log('Log', "VHRS")
         
-        print('SFS image')
+        self.logger.info('SFS image')
         self.out_sfs = self._imag[:-4] + '_sfs.TIF'
         if not os.path.exists(self.out_sfs):
-            print('SFS image doesn\'t exist !')
+            self.logger.info("SFS image doesn't exist!")
             p_sfs = Process(target=self.sfs_texture_extraction)
             p_sfs.start()
             if mp == 0:
                 p_sfs.join()
 #             self.sfs_texture_extraction()
             
-        print('Haralick image')
+        self.logger.info('Haralick image')
         self.out_haralick = self._imag[:-4] + '_haralick.TIF'
         if not os.path.exists(self.out_haralick):
-            print('Haralick image doesn\'t exist !')
+            self.logger.info("Haralick image doesn't exist !")
             p_har = Process(target=self.haralick_texture_extraction, args=('simple', ))
             p_har.start()
             if mp == 0:
@@ -93,7 +95,7 @@ class Vhrs():
         process_tocall = ['otbcli_SFSTextureExtraction', '-in', self._imag, '-channel', '2', '-parameters.spethre', '50.0', \
                           '-parameters.spathre', '100', '-out', self.out_sfs]
         
-        print(process_tocall)
+        self.logger.info(process_tocall)
         subprocess.call(process_tocall) 
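+        # Equivalent OTB command line, for reference (hypothetical file names) :
+        #     otbcli_SFSTextureExtraction -in image.tif -channel 2 -parameters.spethre 50.0 -parameters.spathre 100 -out image_sfs.TIF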
         
     def haralick_texture_extraction(self, texture_choice):
@@ -128,6 +130,6 @@ class Vhrs():
         process_tocall =  ['otbcli_HaralickTextureExtraction', '-in', self._imag, '-channel', '2', '-parameters.xrad', '3', \
                            '-parameters.yrad', '3', '-texture', texture_choice, '-out', self.out_haralick]
         
-        print(process_tocall)
+        self.logger.info(process_tocall)
         subprocess.call(process_tocall) 
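+        # Equivalent OTB command line, for reference (hypothetical file names) :
+        #     otbcli_HaralickTextureExtraction -in image.tif -channel 2 -parameters.xrad 3 -parameters.yrad 3 -texture simple -out image_haralick.TIF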
         
\ No newline at end of file