diff --git a/Archive.py b/Archive.py
index 7c83929204ca445d79f806d81339db7c82fc1426..1b9b862ab06947ee36285b8b244f4de51bbf147f 100644
--- a/Archive.py
+++ b/Archive.py
@@ -53,14 +53,13 @@ class Archive():
         :type repertory: str
     """
     
-    def __init__(self, captor, list_year, box, folder, repertory, proxy_enabled):
+    def __init__(self, captor, list_year, box, repertory, proxy_enabled):
         """
             Create a new 'Archive' instance
         """
         self._captor = captor
         self._list_year = list_year.split(";")
         self._box = box
-        self._folder = folder
         self._repertory = repertory
         self._proxy_enabled = proxy_enabled
         
@@ -241,9 +240,7 @@ class Archive():
             # Initialisation variable for a next page 
             # There is a next page, next = 1
             # There isn't next page, next = 0
-            next_ = 1
-            if not os.path.exists("{0}/{1}".format(self._folder ,self._repertory)):
-                os.mkdir("{0}/{1}".format(self._folder ,self._repertory))
+            next_ = 1
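+            # NB : the archive folder is no longer created here ; it is created
+            # upstream, in Processing.i_download(), before Archive is instantiated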
                 
             # To know path to download images
             while next_ == 1:
@@ -267,7 +264,7 @@ class Archive():
                         link_archive = data_Dict['features'][d]['properties']['services']['download']['url'].replace("\\", "")
                         url_archive = link_archive.replace(self.resto, "rocket/#")
                         archive_download = url_archive.replace("/download", "") # Path to download
-                        out_archive = self._folder + '/' + self._repertory + '/' + name_archive + '.tgz' # Name after download
+                        out_archive = "{0}/{1}.tgz".format(self._repertory, name_archive) # Name after download
                         # if len(self.list_archive) == 0 : self.list_archive.append([archive_download, out_archive, feature_id])
                         self.list_archive.append([archive_download, out_archive, feature_id]) 
                     
@@ -360,14 +357,14 @@ class Archive():
             except IndexError:
                 self.logger.info ("=============== {0} ===============".format(first_date))
             
-            img_in_glob = sorted(glob.glob("{0}/{1}/*gz".format(self._folder, self._repertory)))
+            img_in_glob = sorted(glob.glob("{0}/*gz".format(self._repertory)))
             
             if not img_in_glob :
                 self.logger.error("There isn't tgzfile in the folder")
                 sys.exit(1)
             else:
                 # Create a folder "Unpack"
-                folder_unpack = "{0}/{1}/Unpack".format(self._folder, self._repertory)
+                folder_unpack = "{0}/Unpack".format(self._repertory)
                 
                 if not os.path.isdir(folder_unpack):
                     os.mkdir(folder_unpack)
diff --git a/PHYMOBAT.py b/PHYMOBAT.py
index 13804e1b73c73ab7a3f174612f6a36441052e5e8..3a4db08e17f6e22d364b7e263d02ca329198f5dc 100644
--- a/PHYMOBAT.py
+++ b/PHYMOBAT.py
@@ -80,7 +80,8 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 		self.initUI()
 
 		# Chargement et lancement automatique pour les tests à supprimer
-		self.open_backup("/home/commandre/Documents/data_test_2/2018.xml")
+		# self.open_backup("/media/commandre/8ce2dd88-1425-4161-a3c1-47f566c0285a/data_test/2018.xml")
+		self.open_backup("/media/commandre/8ce2dd88-1425-4161-a3c1-47f566c0285a/Gironde_Test/gironde_total.xml")
 		self.ok_button()
 		
 	def initUI(self):
@@ -945,10 +946,8 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 
 			# Compute optimal threshold 
 			self.i_sample_rf()
-				
+
 			# Classification processing 
-			self.out_fieldname_carto = self.out_fieldname_carto
-			self.out_fieldtype_carto = self.out_fieldtype_carto 
 			self.i_classifier_rf()
 
 			self.i_validate()
@@ -957,6 +956,7 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 		#self.clear_sample()
 		self.out_fieldname_carto = ['ID', 'AREA']
 		self.out_fieldtype_carto = [ogr.OFTString, ogr.OFTReal]
+
 		# Images after processing images
 		self.out_ndvistats_folder_tab = defaultdict(list)
 		
@@ -964,7 +964,7 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 		
 		# End of the processus
 		endTime = time.time()
-		self.logger.info('........... Outputted to File in {0} secondes'.format(endTime - startTime))
+		self.logger.info('Output written to file in {0} seconds'.format(endTime - startTime))
 		nb_day_processing = int(time.strftime('%d', time.gmtime(endTime - startTime))) - 1
 		self.logger.info("That is {0} day(s) {1}".format(nb_day_processing, time.strftime('%Hh %Mmin%S', time.gmtime(endTime - startTime))))
 		
@@ -972,6 +972,8 @@ class PHYMOBAT(QtWidgets.QMainWindow, Processing):
 		"""
 			Function to load input text in every fields. The input file must be a XML file.
 		"""
+
+		# in_backup = QtWidgets.QFileDialog.getOpenFileName(self, "Open backup", os.getcwd(), '*.xml')[0]
 		
 		if test is None :
 			in_backup = QtWidgets.QFileDialog.getOpenFileName(self, "Open backup", os.getcwd(), '*.xml')[0]
diff --git a/Precision_moba.py b/Precision_moba.py
index b0678bd7da61230f8ca5d41a7e84230badac8afa..f54e63a590cecb5ada1d31be4b56ba22f241f2b1 100644
--- a/Precision_moba.py
+++ b/Precision_moba.py
@@ -97,6 +97,13 @@ class Precision_moba():
 		
 		# Define vector
 		vector_pr = Vector(shp_pr, self.path_cut, **opt)
+
+		# vector_pr.clip_vector()
+		vector_pr.vector_data()
 		
 		# Define Toolbox used
 		current_img = Toolbox()
diff --git a/Processing.py b/Processing.py
index 3c115aad20e6236d33c65b615768b575890f1374..7d10993bc9bb55bdfc6153e841efc0c2dc6570be 100644
--- a/Processing.py
+++ b/Processing.py
@@ -224,13 +224,16 @@ class Processing(object):
 			in the archive folder (**folder_archive**).
 		"""
 		
-		folder_archive = self.captor_project + '_PoleTheia'
+		folder_archive = "{0}/{1}_PoleTheia".format(self.folder_processing, self.captor_project)
 
-		self.check_download = Archive(self.captor_project, self.classif_year, self.path_area, self.path_folder_dpt, folder_archive, self.w_proxy)
+		if not os.path.exists(folder_archive) :
+			os.makedirs(folder_archive)
+
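+		# Archive's constructor lost its separate 'folder' argument :
+		# 'folder_archive' is now passed directly as the 'repertory' path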
+		self.check_download = Archive(self.captor_project, self.classif_year, self.path_area, folder_archive, self.w_proxy)
 		self.nb_avalaible_images = self.check_download.listing()
 		self.check_download.download_auto(self.user, self.password)
 		self.check_download.decompress()
-		 
+
 	def i_img_sat(self):	
 		"""
 			Interface function to processing satellite images:
@@ -251,22 +254,26 @@ class Processing(object):
 				>>> stats_test = RasterSat_by_date(class_archive, big_folder, one_date)
 				>>> stats_test.complete_raster(stats_test.create_raster(in_raster, stats_data, in_ds), stats_data)
 		"""
+		
+		current_list = Toolbox()
+		current_list.vect = self.path_area
 
 		# Map projection of the image and the cloud mask
 		for clip_index, clip in enumerate(self.check_download.list_img):
-			current_list = Toolbox()
+			
 			current_list.image = clip[3]
-			current_list.vect = self.path_area
-
+			
 			current_list.check_proj() # Check if projection is RFG93 
 			self.check_download.list_img[clip_index][3] = current_list.clip_raster() # Multispectral images
 			
 			current_list.image = clip[4]
 			current_list.check_proj() # Check if projection is RFG93 
+
 			self.check_download.list_img[clip_index][4] = current_list.clip_raster() # Cloud images
-		   
+
 		# Images pre-processing
 		spectral_out = []
+
 		check_L8 = RasterSat_by_date(self.check_download, self.folder_processing)
 
 		for date in self.check_download.single_date:
@@ -289,7 +296,7 @@ class Processing(object):
 		stats_ndvi, stats_cloud = current_list.calc_serie_stats(spectral_trans)
 
 		# Stats cloud raster		
-		stats_cloud_raster = "{0}/{1}/{2}/Cloud_number_{3}.TIF".format(check_L8._class_archive._folder, check_L8._big_folder, self.captor_project, self.captor_project)
+		stats_cloud_raster = "{0}/Cloud_number_{1}.TIF".format(self.folder_processing, self.captor_project)
 
 		check_L8.complete_raster(\
 			*check_L8.create_empty_raster(stats_cloud_raster, stats_cloud,\
@@ -298,7 +305,7 @@ class Processing(object):
 
 		# Stats ndvi rasters		
 		for stats_index in range(len(stats_ndvi)):
-			out_ndvistats_raster = "{0}/{1}/{2}/{3}_{4}.TIF".format(check_L8._class_archive._folder, check_L8._big_folder, self.captor_project, stats_name[stats_index], self.captor_project)
+			out_ndvistats_raster = "{0}/{1}_{2}.TIF".format(self.folder_processing, stats_name[stats_index], self.captor_project)
 			self.out_ndvistats_folder_tab[stats_index] = out_ndvistats_raster
 			check_L8.complete_raster(\
 				*check_L8.create_empty_raster(out_ndvistats_raster, stats_ndvi[stats_index],\
@@ -306,6 +313,7 @@ class Processing(object):
 				 stats_ndvi[stats_index])
 
 		check_L8.logger.close()
+		current_list.logger.close()
 
 	def i_slope(self):
 		"""
@@ -341,6 +349,8 @@ class Processing(object):
 
 		current_path_ortho.image = self.path_ortho
 		current_path_ortho.vect = self.path_area
+		current_path_ortho.output = "{0}/MosaiqueOrtho/Clip_{1}".format(self.folder_processing, os.path.basename(self.path_ortho))
+
 		path_ortho = current_path_ortho.clip_raster()
 		
 		texture_irc = Vhrs(path_ortho, self.mp)
@@ -364,22 +374,28 @@ class Processing(object):
 		mgr.start()
 
 		self.out_ndvistats_folder_tab = mgr.defaultdict(list)
+
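+		# multiprocessing is disabled for now : i_vhrs() and i_img_sat() run
+		# sequentially, and the Process based version is kept commented out below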
+		self.i_vhrs()
+		self.i_img_sat()
+		
 		
-		p_img_sat = Process(target=self.i_img_sat)
-		p_vhrs = Process(target=self.i_vhrs)
+		# p_img_sat = Process(target=self.i_img_sat)
+		# p_vhrs = Process(target=self.i_vhrs)
 
-		self.logger.debug("Multiprocessing : {0}".format(self.mp))
+		# self.logger.debug("Multiprocessing : {0}".format(self.mp))
 		
-		if self.mp == Constantes.MULTIPROCESSING_DISABLE:
-			p_img_sat.start()
-			p_img_sat.join()
-			p_vhrs.start()
-			p_vhrs.join()	
-		else :
-			p_img_sat.start()
-			p_vhrs.start()
-			p_img_sat.join()
-			p_vhrs.join()
+		# if self.mp == Constantes.MULTIPROCESSING_DISABLE:
+		# 	p_img_sat.start()
+		# 	p_img_sat.join()
+		# 	p_vhrs.start()
+		# 	p_vhrs.join()	
+		# else :
+		# 	p_img_sat.start()
+		# 	p_vhrs.start()
+		# 	p_img_sat.join()
+		# 	p_vhrs.join()
 
 		###################################################################
 
@@ -413,22 +429,30 @@ class Processing(object):
 			
 			@returns: str -- variable **Rpg.vector_used**, output no duplicated crops shapefile (path).
 		"""
-			   
+
+		self.folder_processing = "{0}/{1}/{2}".format(self.path_folder_dpt, self.folder_processing, self.captor_project)
+
+		dossier_rpg = "{0}/RPG".format(self.folder_processing)
+		
 		# Extract rpg crops
 		mono_sample = Rpg(path_rpg, self.path_area)
+		
+		mono_sample.clip_vector(dossier_rpg)
+		mono_sample.vector_data()
 
 		# If exists, do not create a mono rpg file
-		if not os.path.exists("{0}/MONO_{1}".format(mono_sample.vector_folder, mono_sample.vector_name)):
-			mono_sample.mono_rpg()
+		if not os.path.exists("{0}/MONO_{1}".format(dossier_rpg, mono_sample.vector_name)):
+			mono_sample.mono_rpg(path_rpg)
 			mono_sample.create_new_rpg_files()
 		else:
 			self.logger.info('MONO RPG file exists already.')
-			mono_sample.vector_used = "{0}/MONO_{1}".format(mono_sample.vector_folder, mono_sample.vector_name)
+			mono_sample.vector_used = "{0}/MONO_{1}".format(dossier_rpg, mono_sample.vector_name)
 
 		self.logger.info('End of RPG processing')
 
 		mono_sample.logger.close()
-		
+
 		return mono_sample.vector_used
 		 
 	def i_sample(self):
@@ -461,9 +485,18 @@ class Processing(object):
 					kwargs = {}
 					kwargs['fieldname'] = self.fieldname_args[sple]
 					kwargs['class'] = self.class_args[sple]
-					sample_rd[sple] = Sample(self.sample_name[sple/2], self.path_area, self.list_nb_sample[sple/2])
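+					# sple//2 restores Python 2's integer division : round(sple/2)
+					# rounds .5 halves to the nearest even value (round(3/2) == 2),
+					# which would mis-index the sample lists for odd sple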
+					sample_rd[sple] = Sample(self.sample_name[sple//2], self.path_area,\
+					 self.list_nb_sample[sple//2])
+
+					sample_rd[sple].clip_vector("{0}/{1}".format(self.folder_processing, os.path.basename(\
+						os.path.dirname(self.sample_name[sple//2]))))
+
+					sample_rd[sple].vector_data()
+					sample_rd[sple].output_folder = self.folder_processing
+
 					sample_rd[sple].create_sample(**kwargs)
-					sample_rd[sple].zonal_stats((self.raster_path[sple/2], self.list_band_outraster[sple/2]))
+					sample_rd[sple].zonal_stats((self.raster_path[sple//2],\
+					 self.list_band_outraster[sple//2]))
 					
 					# Add the validation shapefile
 					self.valid_shp.append([sample_rd[sple].vector_val, kwargs['fieldname'], kwargs['class']])
@@ -483,12 +516,13 @@ class Processing(object):
 						f.write('The class 1 {0}\n'.format(self.decis[th_seath].threshold[0]))
 	
 				i_s = 20
-			except:
+			except Exception as e:
+				self.logger.error("Error during sample processing : {0}".format(e))
 				i_s = i_s + 1
 
 		# Stop the processus if there is not found a valid threshold
-		if i_s != 20:
-			self.logger.error('Problem durinf the sample processing.')
+		if i_s != 20 :
+			self.logger.error('Problem during the sample processing.')
 			sys.exit(1)
 	
 	def i_sample_rf(self):
@@ -504,6 +538,8 @@ class Processing(object):
 
 		self.logger.debug("Taille : {0}".format(len(self.raster_path)))
 		self.logger.debug("Rasters : {0}".format(self.raster_path))
+		self.logger.debug("Folder processing : {0}".format(self.folder_processing))
 
 		# Extract value mean from polygons
 		for sple in range(len(self.sample_name) * 2):
@@ -511,16 +547,22 @@ class Processing(object):
 			kwargs['fieldname'] = self.fieldname_args[sple]
 			kwargs['class'] = self.class_args[sple]
 
-			self.logger.debug("Creation Sample : {0} - {1} - {2}".format( \
+			self.logger.debug("Creation Sample : {0} - {1} - {2}".format(\
 				self.sample_name[round(sple/2)], self.path_area, self.list_nb_sample[round(sple/2)]) )
 
 			sample_rd[sple] = Sample(self.sample_name[round(sple/2)], self.path_area,\
 			 self.list_nb_sample[round(sple/2)])
 
+			sample_rd[sple].clip_vector("{0}/{1}".format(self.folder_processing, os.path.basename(\
+				os.path.dirname(self.sample_name[round(sple/2)]))))
+			
+			sample_rd[sple].vector_data()
+			sample_rd[sple].output_folder = self.folder_processing
+		
 			self.logger.debug("kwargs : {0}".format(kwargs))
 
 			sample_rd[sple].create_sample(**kwargs)
-			
+
 			# Add the validation shapefile
 			self.valid_shp.append([sample_rd[sple].vector_val, kwargs['fieldname'], kwargs['class']])
 
@@ -535,20 +577,19 @@ class Processing(object):
 
 				self.logger.debug("item : {0}".format(items[lbo][1]))
 				sample_rd[sple].zonal_stats(items[lbo][1]["PATH"], items[lbo][1]["BAND"], **kwargs)
-			
 
 			# To convert the dictionnary in a list
 			# for key, value in sample_rd[sple].stats_dict.iteritems():
 			for key, value in sample_rd[sple].stats_dict.items():
 				# X_rf.append([-10000 if (math.isnan(x) or math.isinf(x)) else x for x in value])
 				# To set the grassland class of the RPG and PIAO (same class)			
-				if sple == 2:
+				if sple == 2 :
 					# y_rf.append(1)
 					pass
-				elif sple == 3:
+				elif sple == 3 :
 					# y_rf.append(4)
 					pass
-				else:
+				else :
 					y_rf.append(sple)
 					X_rf.append([-10000 if (x is None or math.isnan(x) or math.isinf(x)) else x for x in value])
 		
@@ -568,7 +609,7 @@ class Processing(object):
 			os.remove(file_feat_import)
 		
 		with open(file_feat_import, "w") as f_out :
-			f_out.write(str(importance))
+			f_out.write("{0}".format(importance))
 		
 		# Print in a file decision tree
 		file_decisiontree = "{0}/Decision_tree.dot".format(os.path.dirname(items[0][1]["PATH"]))
@@ -598,7 +639,15 @@ class Processing(object):
 		multi_process_var = [] # Multi processing variable
 		  
 		# Extract final cartography
-		out_carto = Segmentation.Segmentation(self.path_segm, self.path_area) 
+		out_carto = Segmentation.Segmentation(self.path_segm, self.path_area)
+
+		self.logger.debug(self.path_segm)
+		self.logger.debug(self.path_area)
+
+		out_carto.clip_vector("{0}/{1}".format(\
+			self.folder_processing, os.path.basename(os.path.dirname(self.path_segm))))
+		out_carto.vector_data()
+
 		out_carto.output_file = self.output_name_moba
 		out_carto.out_class_name = self.in_class_name
 
@@ -665,6 +714,7 @@ class Processing(object):
 			# Compute the biomass and density distribution
 			# Use 'out_carto.out_threshold' to know predicted in the segmentation class
 			out_carto.out_threshold["PREDICTION"] = predicted_rf
+			
 			# In the compute_biomass_density function, this variable used normally to define 
 			# threshold of the classification with SEATH method is initialized
 			out_carto.compute_biomass_density('RF')
@@ -703,6 +753,9 @@ class Processing(object):
 		opt['Remove'] = 1
 		rpg_tif = Vector(self.sample_name[0], self.path_area, **opt)
 
+		rpg_tif.clip_vector(os.path.dirname(self.sample_name[0]))
+		rpg_tif.vector_data()
+
 		kwargs['choice_nb_b'] = 1
 		out_carto.stats_rpg_tif = out_carto.zonal_stats_pp(rpg_tif.layer_rasterization(self.path_ortho, 'CODE_GROUP', **kwargs))
 
@@ -870,14 +923,20 @@ class Processing(object):
 					val[0] = val[0][:-4] + '_.shp'
 					val[1] = opt['fieldname']
 					val[2] = opt['class']
+
+					sample_val.clip_vector(sample_val.vector_folder)
+					sample_val.vector_data()
+					
 					# Complete the new shapefile
 					sample_val.fill_sample(val[0], 0, **opt)
+					
 					# Second time, merge the validate shapefile
-					if class_validate == 0:
-						process_tocall_merge =  ['ogr2ogr', '-overwrite', complete_validate_shp, val[0]]
-					elif class_validate > 0:
-						process_tocall_merge =  ['ogr2ogr', '-update', '-append', complete_validate_shp, \
-												 val[0], '-nln', os.path.basename(str(complete_validate_shp[:-4]))]
+					if class_validate == 0 :
+						process_tocall_merge = ['ogr2ogr', '-overwrite', complete_validate_shp, val[0]]
+					elif class_validate > 0 :
+						process_tocall_merge = ['ogr2ogr', '-update', '-append', complete_validate_shp, \
+										val[0], '-nln', os.path.basename(str(complete_validate_shp[:-4]))]
+					
 					subprocess.call(process_tocall_merge)
 
 					sample_val.logger.close()
diff --git a/RasterSat_by_date.py b/RasterSat_by_date.py
index 77416dc150bccb0672a04f6436435e7704da02f2..84ee4ab2f4a2601030d94b34b614e4c5dc02eb13 100644
--- a/RasterSat_by_date.py
+++ b/RasterSat_by_date.py
@@ -19,15 +19,12 @@
 
 import os, sys
 import math, subprocess
-try :
-	import gdal
-except :
-	from osgeo import gdal
+
+import osgeo.gdal
 
 import Outils
 import Satellites
 import numpy as np
-from libtiff import TIFF
 
 class RasterSat_by_date(object):
 	"""
@@ -58,6 +55,7 @@ class RasterSat_by_date(object):
 		"""
 
 		self.logger = Outils.Log("Log", "RasterSat_by_date")
+		
 		self._class_archive = class_archive
 		self._big_folder = big_folder
 		
@@ -99,12 +97,13 @@ class RasterSat_by_date(object):
 			@param dst_data: Output path
 			@type dst_data: str
 		"""
-		
+
 		if os.path.exists(dst_data):
 			os.remove(dst_data)
 		
 		# Select process
 		if vrt_translate == 'vrt':
+			
 			# Verify input data
 			if type(src_data) is not np.ndarray and type(src_data) is not list:
 				self.logger.error('VRT file ! The data source should be composed of several data. A list minimal of 2 dimensions')
@@ -112,51 +111,39 @@ class RasterSat_by_date(object):
 				
 			self.logger.info('Build VRT file')
 			if not os.path.exists(dst_data):
-				process_tocall = ['gdalbuildvrt', '-srcnodata', '-10000', dst_data]
-				
-			# Copy rasters
-			for cp_image in src_data:
-				process_tocall.append(cp_image)
+				options = osgeo.gdal.BuildVRTOptions(srcNodata=-10000)
+				osgeo.gdal.BuildVRT(dst_data, src_data, options=options)
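+				# BuildVRT returns a gdal.Dataset ; dropping the reference (as here)
+				# closes the dataset and flushes the VRT file to disk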
 				
 		elif vrt_translate == 'translate':
 			# Verify input data
 			try :
 				src_data = str(src_data)
-			except:# if type(src_data) is not str:
+			except:
 				self.logger.error('Geotiff file ! The data source should be composed of path file. A character string !')
 				sys.exit(1)
 				
 			self.logger.info('Build Geotiff file')
 			if not os.path.exists(dst_data):
-				process_tocall = ['gdal_translate', '-a_nodata', '-10000', src_data, dst_data]
-			
-		# Launch vrt process
-		subprocess.call(process_tocall)
+				options = osgeo.gdal.TranslateOptions(noData=-10000)
+				osgeo.gdal.Translate(dst_data, src_data, options=options)
 		
 	def mosaic_by_date(self, date):
 		"""
 			Function to merge images of the same date in a image group : func:`group_by_date`.
 		"""
 		
-		traitement_folder = "{0}/{1}".format(self._class_archive._folder, self._big_folder)
-	
-		# Create the processing images folder if not exists
-		if not os.path.exists(traitement_folder):
-			os.mkdir(traitement_folder)
-
 		# Matrix multi images for a single date 
 		group = self.group_by_date(date) # Every images [year, month, day, multispectral image, cloud image]
 		group_ = np.transpose(np.array(group)) # Transpose matrix to extract path of images
 
 		# Create a folder with images year if it doesn't exist
-		index_repertory_img = self._class_archive._captor
-		sensor_folder = "{0}/{1}".format(traitement_folder, self._class_archive._captor)
+		mosaic_folder = "{0}/Mosaic".format(self._big_folder)
 		
-		if not os.path.exists(sensor_folder):
-			os.mkdir(sensor_folder)
+		if not os.path.exists(mosaic_folder):
+			os.mkdir(mosaic_folder)
 
 		# Create a folder with images date if it doesn't exist
-		date_folder = "{0}/{1}".format(sensor_folder, "".join(date))
+		date_folder = "{0}/{1}".format(mosaic_folder, "".join(date))
 
 		if not os.path.exists(date_folder):
 			os.mkdir(date_folder)
@@ -167,13 +154,13 @@ class RasterSat_by_date(object):
 		vrt_out = "{0}/{1}{2}.VRT".format(date_folder,self._class_archive._captor,os.path.basename(date_folder)) 
 
 		if not os.path.exists(vrt_out):
-			self.vrt_translate_gdal('vrt', group_[3], vrt_out)
-		
+			self.vrt_translate_gdal('vrt', list(group_[3]), vrt_out)
+
 		# Cloud VRT outfile
 		vrtcloud_out = "{0}/{1}{2}_NUA.VRT".format(date_folder,self._class_archive._captor,os.path.basename(date_folder)) 
 
 		if not os.path.exists(vrtcloud_out):
-			self.vrt_translate_gdal('vrt', group_[4], vrtcloud_out)
+			self.vrt_translate_gdal('vrt', list(group_[4]), vrtcloud_out)
 		
 		########################### Build Geotiff file with data images required ###########################
 		
@@ -203,13 +190,13 @@ class RasterSat_by_date(object):
 		"""
 		
 		# Load Gdal's drivers
-		gdal.AllRegister()
+		osgeo.gdal.AllRegister()
 
 		self.logger.debug("Img : {0}".format(img))
 		
 		# Loading input raster
 		self.logger.info('Loading input raster : {0}'.format(os.path.basename(img)))
-		in_ds = gdal.Open(img, gdal.GA_ReadOnly)
+		in_ds = osgeo.gdal.Open(img, osgeo.gdal.GA_ReadOnly)
 
 		# if it doesn't exist
 		if in_ds is None:
@@ -315,7 +302,7 @@ class RasterSat_by_date(object):
 		mask = np.greater(data_cloud, -10000)
 
 		# If True, -10000 (NaN) else compute mathematical operation
-		ndvi = np.choose(mask, (-10000, eval('(data[canal_PIR]-data[canal_R])') / eval('(data[canal_PIR]+data[canal_R])')))
+		ndvi = np.choose(mask, (-10000, (data[canal_PIR] - data[canal_R]) / (data[canal_PIR] + data[canal_R])))
 		
 		# Outfile name
 		img_ndvi = "{0}_ndvi.TIF".format(self.cloudiness_pourcentage[0][:-4])
@@ -354,7 +341,7 @@ class RasterSat_by_date(object):
 		else:
 			nbband = in_ds.RasterCount 
 			
-		driver = gdal.GetDriverByName('GTiff') 
+		driver = osgeo.gdal.GetDriverByName('GTiff') 
 
 		if os.path.exists(out_raster_path):
 			os.remove(out_raster_path)
@@ -363,7 +350,7 @@ class RasterSat_by_date(object):
 
 			e = 0	
 			# Create outfile
-			self.out_ds = driver.Create(out_raster_path, in_ds.RasterXSize, in_ds.RasterYSize, nbband, gdal.GDT_Float32)
+			self.out_ds = driver.Create(out_raster_path, in_ds.RasterXSize, in_ds.RasterYSize, nbband, osgeo.gdal.GDT_Float32)
 
 			if self.out_ds is None:
 				self.logger.error('Could not create {0}'.format(os.path.basename(out_raster_path)))
@@ -388,7 +375,7 @@ class RasterSat_by_date(object):
 			self.out_ds.SetProjection(def_projection)
 
 		else:
-			self.out_ds = gdal.Open(out_raster_path, gdal.GA_ReadOnly)
+			self.out_ds = osgeo.gdal.Open(out_raster_path, osgeo.gdal.GA_ReadOnly)
 			
 		return nbband, e
 	
diff --git a/Rpg.py b/Rpg.py
index 42114445901936f6dadfe893629dee7646e49cc4..7c09d259c417178237b8416d8f25ed75516a5423 100644
--- a/Rpg.py
+++ b/Rpg.py
@@ -62,7 +62,7 @@ class Rpg(Vector):
         self.logger = Outils.Log("Log", "Rpg")
 
         Vector.__init__(self, used, cut)
-        
+       
         self.rm_dupli = defaultdict(list)
         self.head_in_read = []
         self.min_size = 1
@@ -74,7 +74,13 @@ class Rpg(Vector):
         """
 
         ## The output shapefile if it exists
-        self.vector_used = "{0}/MONO_{1}".format(self.vector_folder, self.vector_name)
+        self.vector_used = "{0}/MONO_{1}".format(os.path.dirname(self.vector_used), self.vector_name)
 
         if not os.path.exists(self.vector_used):
             
@@ -83,11 +89,11 @@ class Rpg(Vector):
             # Projection
             # Import input shapefile projection
             srsObj = shp_ogr.GetSpatialRef()
+            
             # Conversion to syntax ESRI
             srsObj.MorphToESRI()
             
             # Create output file
-
             try:
                 out_ds = self.data_source.GetDriver().CreateDataSource(self.vector_used)
 
@@ -109,9 +115,9 @@ class Rpg(Vector):
                 while in_feature:
                     
                     id_rpg = str(in_feature.GetField(self.field_names[0]))
+                   
                     # Create a existing polygons in modified rpg list  
                     # with minimum size greater than or equal to 1 ha
-                   
                     if self.rm_dupli[id_rpg] and float(self.rm_dupli[id_rpg][2].replace(',','.')) >= self.min_size:
                         # Add .replace(',','.') if the input RPG contains comma instead of point
                         
@@ -142,19 +148,17 @@ class Rpg(Vector):
                 self.logger.error('Error during creating file : {0}'.format(e))
                 sys.exit(1)           
                  
-       
-    def mono_rpg(self):        
+    def mono_rpg(self, path_rpg):        
         """
             Function to extract no duplicated crops.
-        """
-        
+        """        
         # Table from the RPG input shapefile name
-        file_tab = os.path.split(self.vector_used)[1].split('-')[len(os.path.split(self.vector_used)[1].split('-'))-1].split('_')
+        file_tab = os.path.split(path_rpg)[1].split('-')[len(os.path.split(path_rpg)[1].split('-'))-1].split('_')
 
         # Define the input csv file from RPG input shapefile
-        myfile = os.path.split(self.vector_used)[0] + '/' + file_tab[0] + '-' + file_tab[1] + '-GROUPES-CULTURE_' + file_tab[2] +\
-                                                                '_' + file_tab[3][:-3] + 'csv'
-
+        myfile = "{0}/{1}-{2}-GROUPES-CULTURE_{3}_{4}.csv"\
+        .format(os.path.dirname(path_rpg), file_tab[0], file_tab[1], file_tab[2], file_tab[3][:-4])
+        
         in_read = []
 
         with open(myfile, 'r') as my_file :
diff --git a/Sample.py b/Sample.py
index ae7a078077328fc897e60489a076d6ffc556d6f7..cb2d2f12d259bd6f029d8c00ce97afa3388d5f1c 100644
--- a/Sample.py
+++ b/Sample.py
@@ -77,6 +77,11 @@ class Sample(Vector):
 		self.logger.debug("kw_field : {0}".format(kw_field))
 		self.logger.debug("kw_classes : {0}".format(kw_classes))
 
+		self.output_folder = "{0}/{1}".format(self.output_folder, os.path.basename(os.path.dirname(self.vector_used)))
+		
+		if not os.path.exists(self.output_folder) :
+			os.mkdir(self.output_folder)
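+		# NB : assumes 'output_folder' was set by the caller beforehand, as
+		# Processing does via 'sample_rd[sple].output_folder = self.folder_processing'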
+
 		# If the users want select polygons with a certain class name
 		if kw_field and kw_classes:
 			# The random sample with class name selected only
@@ -88,7 +93,8 @@ class Sample(Vector):
 		self.logger.debug("random_sample : {0}".format(random_sample))
 
 		# Output shapefile of the sample's polygons (path)
-		self.vector_used = "{0}_{1}_rd.shp".format(self.vector_used[:-4], kw_classes.replace(',','').replace(' ',''))
+		self.vector_used = "{0}/{1}_{2}_rd.shp".format(self.output_folder, os.path.basename(self.vector_used)[:-4], kw_classes.replace(',','').replace(' ',''))
 
 		self.logger.debug("vector_used : {0}".format(self.vector_used))
 
diff --git a/Segmentation.py b/Segmentation.py
index 5bc3b462a3d4222990779b24c6ab5ec744589e3e..de9a55507e205f55989117c7e4af9d5050341450 100644
--- a/Segmentation.py
+++ b/Segmentation.py
@@ -284,6 +284,7 @@ class Segmentation(Vector):
 					distri_bio.append(b)
 				else:
 					distri_den.append(b)
+		
 		elif method == 'RF':
 
 			distri = [v[1:] for k, v in self.stats_dict.items() if not self.out_threshold["PREDICTION"][k] in [0,6,7]]
@@ -291,13 +292,22 @@ class Segmentation(Vector):
 			distri_bio = []
 			distri_den = []
 
-			for b in distri:
-				if self.out_threshold["PREDICTION"][distri.index(b)] in [1,2,8,9,10]\
-				 and b[len(b)-1] != -10000 and b[len(b)-1] < 1:
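+			# enumerate() yields the row index directly ; the old distri.index(b)
+			# lookup returned the first matching row, mis-labelling duplicated rows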
+			for idx, b in enumerate(distri):
+				if self.out_threshold["PREDICTION"][idx] in [1,2,8,9,10]\
+				 and b[-1] != -10000 and b[-1] < 1:
 					distri_bio.append(b)
 				else:
 					distri_den.append(b)
 
+		self.logger.debug("distri : {0}".format(len(distri)))
+		self.logger.debug("distri_bio : {0}".format(len(distri_bio)))
+		self.logger.debug("distri_den : {0}".format(len(distri_den)))
+
 		# Transpose table		
 		t_distri_bio = list(map(list, zip(*distri_bio)))
 		t_distri_den = list(map(list, zip(*distri_den)))
diff --git a/Toolbox.py b/Toolbox.py
index d12d207ae77b4195762570a9a1ab37b1a29311cb..e5ccfc9012a239465e7f04dc6e4ef52534a2dbaf 100644
--- a/Toolbox.py
+++ b/Toolbox.py
@@ -22,6 +22,7 @@ from datetime import date
 import os, sys, subprocess, glob
 import numpy as np
 import json
+import osgeo.gdal
 import Constantes
 import Outils
 
@@ -40,8 +41,9 @@ class Toolbox(object):
 		"""
 			Create a new 'Toolbox' instance
 		"""
-		self.image = ''
-		self.vect = ''
+		self.image = None
+		self.vect = None
+		self.output = None
 		self.logger = Outils.Log("Log", "Toolbox")
 	
 	def clip_raster(self, **kwargs):
@@ -56,30 +58,21 @@ class Toolbox(object):
 		"""
 		
 		rm_rast = kwargs['rm_rast'] if kwargs.get('rm_rast') else 0
-		outclip = os.path.split(str(self.image))[0] + '/Clip_' + os.path.split(str(self.image))[1]
+
+		if self.output is None :
+			outclip = "{0}/Clip_{1}".format(os.path.dirname(self.image), os.path.basename(self.image))
+		else :
+			if not os.path.exists(os.path.dirname(self.output)) :
+				os.makedirs(os.path.dirname(self.output))
+
+			outclip = self.output
+
+		options = osgeo.gdal.WarpOptions(dstNodata=-10000, cropToCutline=True,\
+			cutlineDSName=self.vect, outputType=osgeo.gdal.GDT_Int16, format='GTiff')
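+		# WarpOptions is the Python counterpart of the former CLI call :
+		#   gdalwarp -overwrite -dstnodata -10000 -cutline <vect> -crop_to_cutline -of GTiff
+		# outputType=GDT_Int16 is new here and forces a signed 16 bit output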
+
 		if not os.path.exists(outclip) or rm_rast == 1:
-			self.logger.info("Raster clip of {0}".format(os.path.split(str(self.image))[1]))
-			# Command to clip a raster with a shapefile by Gdal
-			process_tocall_clip = ['gdalwarp', '-overwrite', '-dstnodata', '-10000', '-q', '-cutline', self.vect, '-crop_to_cutline', '-of', 'GTiff', self.image, outclip]
-			# This is a trick to remove warning with the polygons that touch themselves
-			try:
-				r = subprocess.call(process_tocall_clip)
-				if r == 1:
-					sys.exit(1)
-					
-			except SystemExit:
-				self.logger.info("Dissolve vector cut of the validation !")
-				
-				vect_2 = self.vect[:-4] + '_v2.shp'
-				preprocess_tocall = 'ogr2ogr -overwrite ' + vect_2 + ' ' + self.vect + ' -dialect sqlite -sql "SELECT ST_Union(geometry), * FROM ' + \
-										os.path.basename(self.vect)[:-4] +'"'
-				os.system(preprocess_tocall)
-				self.logger.info("Raster clip of {0}".format(os.path.split(str(self.image))[1]))
-				process_tocall_clip = ['gdalwarp', '-overwrite', '-dstnodata', '-10000', '-q', '-cutline', vect_2, '-crop_to_cutline', '-of', 'GTiff', self.image, outclip]
-				subprocess.call(process_tocall_clip)
-				
-				for rem in glob.glob(vect_2[:-4] + '*'):
-					os.remove(rem)
+			self.logger.info("Raster clip of {0}".format(os.path.basename(self.image)))
+			osgeo.gdal.Warp(outclip, self.image, options=options)
 					
 		return outclip
 
@@ -113,8 +106,14 @@ class Toolbox(object):
 		 # if tab_cloud = 0 then True else False / Mask cloud
 		cloud_true = (tab_cloud == 0)
 
+		self.logger.debug('Memory cloud_true : {0}'.format(sys.getsizeof(cloud_true)))
+
 		# Account to tab_cloud if != 0 => Sum of True. (Dimension X*Y)
 		account_cloud = np.sum(cloud_true, axis=2) 
+
+		cloud_true = None
+
+		self.logger.debug('Memory cloud_true : {0}'.format(sys.getsizeof(cloud_true)))
 		
 		# For the ndvi stats
 
@@ -146,9 +145,12 @@ class Toolbox(object):
 				for d in range(len(tab_date)):
 					mask_data = np.ma.masked_values(i_date, d)
 					i_date = mask_data.filled(date(int(tab_date[d][0]), int(tab_date[d][1]), int(tab_date[d][2])).toordinal()) # Date = day for year =1 day = 1 and month = 1
+					mask_data = None
 					
 				account_stats.append(i_date) # Add the date table
-				
+
+		mask_cloud = None
+		tab_ndvi_masked = None
 		return account_stats, account_cloud
 	
 	def check_proj(self):
diff --git a/Vector.py b/Vector.py
index 615530b923f14bd03dbdcdaf18190f60fec649a0..4043d2151cfd60e10bb83a3aff6e2f2d9a072319 100644
--- a/Vector.py
+++ b/Vector.py
@@ -20,11 +20,12 @@
 import os, sys
 import subprocess
 import numpy as np
+import osgeo.ogr, osgeo.gdal
 
-try :
-	import ogr, gdal
-except :
-	from osgeo import ogr, gdal
 	
 import rasterstats
 from collections import *
@@ -76,45 +77,49 @@ class Vector():
 		self.vector_cut = cut
 		self.vector_used = used
 		self.remove_shp = opt['Remove'] if opt.get('Remove') else 0
-
-		self.clip_vector()
-
-		self.vector_data()
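+		# NB : clip_vector() and vector_data() are no longer called here ;
+		# callers must now invoke them explicitly, passing clip_vector() an
+		# output folder (see Processing, Rpg, Sample and Segmentation)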
 	
-		# List of field name
-		self.field_names = [self.data_source.GetLayer().GetLayerDefn().GetFieldDefn(l).GetName() \
-					   for l in range(self.data_source.GetLayer().GetLayerDefn().GetFieldCount())]
-			
 		self.stats_dict = defaultdict(list)
 		self.raster_ds = None
 	
-	def clip_vector(self):
+	def clip_vector(self, output_folder):
 		"""
 			Function to clip a vector with a vector
 		"""	
-		
-		outclip = "{0}/Clip_{1}".format(self.vector_folder, self.vector_name)
 
+		if not os.path.exists(output_folder) :
+			os.makedirs(output_folder)
+		
+		outclip = "{0}/Clip_{1}".format(output_folder, self.vector_name)
+		
 		if not os.path.exists(outclip) or self.remove_shp == 1:
 			self.logger.info('Clip of {0}'.format(self.vector_name))
+
 			# Command to clip a vector with a shapefile by OGR
 			process_tocall_clip =\
-			 ['ogr2ogr', '-overwrite', '-skipfailures', outclip, self.vector_used, '-clipsrc', self.vector_cut]
+			 ['ogr2ogr', '-overwrite',\
+			  '-skipfailures', outclip, self.vector_used, \
+			  '-clipsrc', self.vector_cut]
+
 			subprocess.call(process_tocall_clip)
-		
+
 		# Replace input filename by output filename
 		self.vector_used = outclip
 		
 	def vector_data(self):
 		"""
 			Function to extract vector layer information 
-		"""  
+		"""
+
 		try:
-			self.data_source = ogr.GetDriverByName('ESRI Shapefile').Open(self.vector_used, 0)
+			self.data_source = osgeo.ogr.GetDriverByName('ESRI Shapefile').Open(self.vector_used, 0)
 			self.logger.info('Shapefile opening : {0}'.format(self.data_source.GetLayer().GetName()))
 		except :
 			self.logger.error('Could not open file')
-			sys.exit(1)		
+			sys.exit(1)	
+
+		# List of field name
+		self.field_names = [self.data_source.GetLayer().GetLayerDefn().GetFieldDefn(l).GetName() \
+						for l in range(self.data_source.GetLayer().GetLayerDefn().GetFieldCount())]	
 
 	def close_data(self):
 		"""
@@ -212,11 +217,12 @@ class Vector():
 			data_raster = raster_info[0][0]
 		else:
 			data_raster = raster_info[0]
+
 		info_out = example_raster.create_empty_raster(valid_raster, data_raster, raster_info[1])
 		self.raster_ds = example_raster.out_ds
 		
 		# Virtual rasterize the vector 
-		pt_rast = gdal.RasterizeLayer(self.raster_ds, [1], self.data_source.GetLayer(), \
+		pt_rast = osgeo.gdal.RasterizeLayer(self.raster_ds, [1], self.data_source.GetLayer(), \
 									  options=["ATTRIBUTE=" + str(attribute_r)])
 		
 		if pt_rast != 0:
diff --git a/Vhrs.py b/Vhrs.py
index b35516ad823654b7436e15fbe2b905b479393c0c..99c3102dd8e2d9558c3be15a3442ed7c142fad88 100644
--- a/Vhrs.py
+++ b/Vhrs.py
@@ -20,8 +20,11 @@
 import os
 import subprocess
 import Outils
+import otbApplication as otb
+
 from multiprocessing import Process
 
+
 class Vhrs():
 	
 	"""
@@ -49,7 +52,7 @@ class Vhrs():
 		self.logger = Outils.Log('Log', "VHRS")
 		
 		self.logger.info('SFS image')
-		self.out_sfs = self._imag[:-4] + '_sfs.TIF'
+		self.out_sfs = "{0}_sfs.TIF".format(self._imag[:-4])
 		
 		if not os.path.exists(self.out_sfs):
 			self.logger.info("SFS image doesn't exist !")
@@ -60,7 +63,7 @@ class Vhrs():
 				p_sfs.join()
 			
 		self.logger.info('Haralick image')
-		self.out_haralick = self._imag[:-4] + '_haralick.TIF'
+		self.out_haralick = "{0}_haralick.TIF".format(self._imag[:-4])
 
 		if not os.path.exists(self.out_haralick):
 			self.logger.info("Haralick image doesn't exist !")
@@ -76,63 +79,65 @@ class Vhrs():
 				p_har.join()
 		
 	def sfs_texture_extraction(self):
-		
 		"""
-		Function to compute SFS texture image with OTB command line.
-			:Example: otbcli_SFSTextureExtraction -in qb_RoadExtract.tif -channel 1 -parameters.spethre 50.0 -parameters.spathre 100 -out SFSTextures.tif
-			
-			- OTB help :
-				* in : Input Image
-				* channel : Selected Channel
-				* parameters : Texture feature parameters. This group of parameters allows to define SFS texture parameters. The available texture features are SFS’Length, SFS’Width, SFS’PSI, SFS’W-Mean, SFS’Ratio and SFS’SD. They are provided in this exact order in the output image.
-					- parameters.spethre : Spectral Threshold
-					- parameters.spathre : Spatial Threshold
-					- parameters.nbdir : Number of Direction
-					- parameters.alpha : Alpha
-					- parameters.maxcons : Ratio Maximum Consideration Number
-				* out : Feature Output Image
-				
-			Source : http://otbcb.readthedocs.org/en/latest/Applications/app_SFSTextureExtraction.html
+			Function to compute OTB SFS texture image				
+				- OTB help :
+					* in : Input Image
+					* channel : Selected Channel
+					* parameters : Texture feature parameters. This group of parameters allows to define SFS texture parameters. The available texture features are SFS’Length, SFS’Width, SFS’PSI, SFS’W-Mean, SFS’Ratio and SFS’SD. They are provided in this exact order in the output image.
+						- parameters.spethre : Spectral Threshold
+						- parameters.spathre : Spatial Threshold
+						- parameters.nbdir : Number of Direction
+						- parameters.alpha : Alpha
+						- parameters.maxcons : Ratio Maximum Consideration Number
+					* out : Feature Output Image
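+
+				Source : http://otbcb.readthedocs.org/en/latest/Applications/app_SFSTextureExtraction.html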
 		"""
 			  
-		process_tocall = ['otbcli_SFSTextureExtraction', '-in', self._imag, '-channel', '2', '-parameters.spethre', '50.0', \
-						  '-parameters.spathre', '100', '-out', self.out_sfs]
-		
-		self.logger.info(process_tocall)
-		subprocess.call(process_tocall) 
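+		# Python counterpart of the former CLI call :
+		# otbcli_SFSTextureExtraction -in <img> -channel 2 -parameters.spethre 50.0 \
+		# -parameters.spathre 100 -out <out_sfs>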
+		otb_SFS = otb.Registry.CreateApplication("SFSTextureExtraction")
+		otb_SFS.SetParameterString("in", self._imag)
+		otb_SFS.SetParameterInt("channel", 2)
+		otb_SFS.SetParameterFloat("parameters.spethre", 50.0)
+		otb_SFS.SetParameterInt("parameters.spathre", 100)
+		otb_SFS.SetParameterString("out", self.out_sfs)
+
+		otb_SFS.ExecuteAndWriteOutput()
+
+		self.logger.info('SFS image created.')
 		
 	def haralick_texture_extraction(self, texture_choice):
-		
 		"""
-		Function to compute Haralick texture image with OTB command line.
-			:Example: otbcli_HaralickTextureExtraction -in qb_RoadExtract.tif -channel 2 -parameters.xrad 3 -parameters.yrad 3 -texture simple -out HaralickTextures.tif
+			Function to compute OTB Haralick texture image
+				- OTB help :
+					* in : Input Image
+					* channel : Selected Channel
+					* Texture feature parameters : This group of parameters allows to define texture parameters.
+						- X Radius : X Radius
+						- Y Radius : Y Radius
+						- X Offset : X Offset
+						- Y Offset : Y Offset
+					* Image Minimum : Image Minimum
+					* Image Maximum : Image Maximum
+					* Histogram number of bin : Histogram number of bin 
+					* Texture Set Selection Choice of The Texture Set Available choices are :
+						- Simple Haralick Texture Features: This group of parameters defines the 8 local Haralick texture feature output image. The image channels are: Energy, Entropy, Correlation, Inverse Difference Moment, Inertia, Cluster Shade, Cluster Prominence and Haralick Correlation
+						- Advanced Texture Features: This group of parameters defines the 9 advanced texture feature output image. The image channels are: Mean, Variance, Sum Average, Sum Variance, Sum Entropy, Difference of Entropies, Difference of Variances, IC1 and IC2
+						- Higher Order Texture Features: This group of parameters defines the 11 higher order texture feature output image. The image channels are: Short Run Emphasis, Long Run Emphasis, Grey-Level Nonuniformity, Run Length Nonuniformity, Run Percentage, Low Grey-Level Run Emphasis, High Grey-Level Run Emphasis, Short Run Low Grey-Level Emphasis, Short Run High Grey-Level Emphasis, Long Run Low Grey-Level Emphasis and Long Run High Grey-Level Emphasis
+					* out : Feature Output Image 
+					
+				Source : http://otbcb.readthedocs.org/en/latest/Applications/app_HaralickTextureExtraction.html
 			
-			- OTB help :
-				* in : Input Image
-				* channel : Selected Channel
-				* Texture feature parameters : This group of parameters allows to define texture parameters.
-					- X Radius : X Radius
-					- Y Radius : Y Radius
-					- X Offset : X Offset
-					- Y Offset : Y Offset
-				* Image Minimum : Image Minimum
-				* Image Maximum : Image Maximum
-				* Histogram number of bin : Histogram number of bin 
-				* Texture Set Selection Choice of The Texture Set Available choices are :
-					- Simple Haralick Texture Features: This group of parameters defines the 8 local Haralick texture feature output image. The image channels are: Energy, Entropy, Correlation, Inverse Difference Moment, Inertia, Cluster Shade, Cluster Prominence and Haralick Correlation
-					- Advanced Texture Features: This group of parameters defines the 9 advanced texture feature output image. The image channels are: Mean, Variance, Sum Average, Sum Variance, Sum Entropy, Difference of Entropies, Difference of Variances, IC1 and IC2
-					- Higher Order Texture Features: This group of parameters defines the 11 higher order texture feature output image. The image channels are: Short Run Emphasis, Long Run Emphasis, Grey-Level Nonuniformity, Run Length Nonuniformity, Run Percentage, Low Grey-Level Run Emphasis, High Grey-Level Run Emphasis, Short Run Low Grey-Level Emphasis, Short Run High Grey-Level Emphasis, Long Run Low Grey-Level Emphasis and Long Run High Grey-Level Emphasis
-				* out : Feature Output Image 
-				
-			Source : http://otbcb.readthedocs.org/en/latest/Applications/app_HaralickTextureExtraction.html
-		
-		:param texture_choice: Order texture choice -> Simple / Advanced / Higher
-		:type texture_choice: str
+			@param texture_choice: Order texture choice -> Simple / Advanced / Higher
+			@type texture_choice: str
 		"""
 		
-		process_tocall =  ['otbcli_HaralickTextureExtraction', '-in', self._imag, '-channel', '2', '-parameters.xrad', '3', \
-						   '-parameters.yrad', '3', '-texture', texture_choice, '-out', self.out_haralick]
-		
-		self.logger.info(process_tocall)
-		subprocess.call(process_tocall) 
-		
\ No newline at end of file
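+		# Python counterpart of the former CLI call :
+		# otbcli_HaralickTextureExtraction -in <img> -channel 2 -parameters.xrad 3 \
+		# -parameters.yrad 3 -texture <texture_choice> -out <out_haralick>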
+		otb_Haralick = otb.Registry.CreateApplication("HaralickTextureExtraction")
+		otb_Haralick.SetParameterString("in", self._imag)
+		otb_Haralick.SetParameterInt("channel", 2)
+		otb_Haralick.SetParameterInt("parameters.xrad", 3)
+		otb_Haralick.SetParameterInt("parameters.yrad", 3)
+		otb_Haralick.SetParameterString("texture", texture_choice)
+		otb_Haralick.SetParameterString("out", self.out_haralick)
+
+		otb_Haralick.ExecuteAndWriteOutput()
+
+		self.logger.info('Haralick image created.')
\ No newline at end of file