diff --git a/Modules/Applications/AppDimensionalityReduction/cbDimensionalityReduction.cxx b/Modules/Applications/AppDimensionalityReduction/cbDimensionalityReduction.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..de6408ebcb313fc3badfa27a585b125ecbb5964d
--- /dev/null
+++ b/Modules/Applications/AppDimensionalityReduction/cbDimensionalityReduction.cxx
@@ -0,0 +1,261 @@
+/*=========================================================================
+
+  Program:   ORFEO Toolbox
+  Language:  C++
+  Date:      $Date$
+  Version:   $Revision$
+
+
+  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+  See OTBCopyright.txt for details.
+
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notices for more information.
+
+=========================================================================*/
+#include "otbWrapperApplication.h"
+#include "otbWrapperApplicationFactory.h"
+
+#include "itkUnaryFunctorImageFilter.h"
+#include "otbChangeLabelImageFilter.h"
+#include "otbStandardWriterWatcher.h"
+#include "otbStatisticsXMLFileReader.h"
+#include "otbShiftScaleVectorImageFilter.h"
+#include "ImageDimensionalityReductionFilter.h"
+#include "otbMultiToMonoChannelExtractROI.h"
+#include "otbImageToVectorImageCastFilter.h"
+#include "DimensionalityReductionModelFactory.h"
+
+namespace otb
+{
+namespace Functor
+{
+/**
+ * simple affine function : y = ax+b
+ */
+template<class TInput, class TOutput>
+class AffineFunctor
+{
+public:
+  typedef double InternalType;
+  
+  // constructor
+  AffineFunctor() : m_A(1.0),m_B(0.0) {}
+  
+  // destructor
+  virtual ~AffineFunctor() {}
+  
+  void SetA(InternalType a)
+    {
+    m_A = a;
+    }
+  
+  void SetB(InternalType b)
+    {
+    m_B = b;
+    }
+  
+  inline TOutput operator()(const TInput & x) const
+    {
+    return static_cast<TOutput>( static_cast<InternalType>(x)*m_A + m_B);
+    }
+private:
+  InternalType m_A;
+  InternalType m_B;
+};
+  
+}
+
+namespace Wrapper
+{
+
+class CbDimensionalityReduction : public Application
+{
+public:
+  /** Standard class typedefs. */
+  typedef CbDimensionalityReduction             Self;
+  typedef Application                   Superclass;
+  typedef itk::SmartPointer<Self>       Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Standard macro */
+  itkNewMacro(Self);
+
+  itkTypeMacro(CbDimensionalityReduction, otb::Application);
+
+  /** Filters typedef */
+  typedef UInt8ImageType                                                                       MaskImageType;
+  typedef itk::VariableLengthVector<FloatVectorImageType::InternalPixelType>                   MeasurementType;
+  typedef otb::StatisticsXMLFileReader<MeasurementType>                                        StatisticsReader;
+  typedef otb::ShiftScaleVectorImageFilter<FloatVectorImageType, FloatVectorImageType>         RescalerType;
+  typedef itk::UnaryFunctorImageFilter<
+      FloatImageType,
+      FloatImageType,
+      otb::Functor::AffineFunctor<float,float> >                                               OutputRescalerType;
+  typedef otb::ImageDimensionalityReductionFilter<FloatVectorImageType, FloatVectorImageType, MaskImageType>  DimensionalityReductionFilterType;
+  typedef DimensionalityReductionFilterType::Pointer                                                    DimensionalityReductionFilterPointerType;
+  typedef DimensionalityReductionFilterType::ModelType                                                  ModelType;
+  typedef ModelType::Pointer                                                                   ModelPointerType;
+  typedef DimensionalityReductionFilterType::ValueType                                                  ValueType;
+  typedef DimensionalityReductionFilterType::LabelType                                                  LabelType;
+  typedef otb::DimensionalityReductionModelFactory<ValueType, LabelType>                               DimensionalityReductionModelFactoryType;
+
+protected:
+
+  ~CbDimensionalityReduction() ITK_OVERRIDE
+    {
+    DimensionalityReductionModelFactoryType::CleanFactories();
+    }
+
+private:
+  void DoInit() ITK_OVERRIDE
+  {
+    SetName("DimensionalityReduction");
+    SetDescription("Performs dimensionality reduction of the input image according to a dimensionality reduction model file.");
+
+    // Documentation
+    SetDocName("DimensionalityReduction");
+    SetDocLongDescription("This application reduces the dimension of an input"
+                          " image, based on a machine learning model file produced by"
+                          " the DimensionalityReductionTrainer application. Pixels of the"
+                          " output image will contain the reduced values from"
+                          " the model. The input pixels"
+                          " can be optionally centered and reduced according"
+                          " to the statistics file produced by the"
+                          " ComputeImagesStatistics application.");
+
+    SetDocLimitations("The input image must contain the feature bands used for"
+                      " the model training. "
+                      "If a statistics file was used during training by the "
+                      "Training application, it is mandatory to use the same "
+                      "statistics file for reduction.");
+    SetDocAuthors("OTB-Team");
+    SetDocSeeAlso("DimensionalityReductionTrainer, ComputeImagesStatistics");
+
+    AddDocTag(Tags::Learning);
+
+    AddParameter(ParameterType_InputImage, "in",  "Input Image");
+    SetParameterDescription( "in", "The input image to predict.");
+
+    AddParameter(ParameterType_InputImage,  "mask",   "Input Mask");
+    SetParameterDescription( "mask", "The mask allow restricting "
+      "classification of the input image to the area where mask pixel values "
+      "are greater than 0.");
+    MandatoryOff("mask");
+
+    AddParameter(ParameterType_InputFilename, "model", "Model file");
+    SetParameterDescription("model", "A regression model file (produced by "
+      "TrainRegression application).");
+
+    AddParameter(ParameterType_InputFilename, "imstat", "Statistics file");
+    SetParameterDescription("imstat", "A XML file containing mean and standard"
+      " deviation to center and reduce samples before prediction "
+      "(produced by ComputeImagesStatistics application). If this file contains"
+      "one more band than the sample size, the last stat of last band will be"
+      "applied to expand the output predicted value");
+    MandatoryOff("imstat");
+
+    AddParameter(ParameterType_OutputImage, "out",  "Output Image");
+    SetParameterDescription( "out", "Output image containing predicted values");
+
+    AddRAMParameter();
+
+   // Doc example parameter settings
+    SetDocExampleParameterValue("in", "QB_1_ortho.tif");
+    SetDocExampleParameterValue("imstat", "EstimateImageStatisticsQB1.xml");
+    SetDocExampleParameterValue("model", "clsvmModelQB1.svm");
+    SetDocExampleParameterValue("out", "clLabeledImageQB1.tif");
+  }
+
+  void DoUpdateParameters() ITK_OVERRIDE
+  {
+    // Nothing to do here : all parameters are independent
+  }
+
+  void DoExecute() ITK_OVERRIDE
+  {
+    // Load input image
+    FloatVectorImageType::Pointer inImage = GetParameterImage("in");
+    inImage->UpdateOutputInformation();
+    unsigned int nbFeatures = inImage->GetNumberOfComponentsPerPixel();
+
+    // Load DR model using a factory
+    otbAppLogINFO("Loading model");
+    m_Model = DimensionalityReductionModelFactoryType::CreateDimensionalityReductionModel(GetParameterString("model"),
+                                                                          DimensionalityReductionModelFactoryType::ReadMode);
+
+    if (m_Model.IsNull())
+      {
+      otbAppLogFATAL(<< "Error when loading model " << GetParameterString("model") << " : unsupported model type");
+      }
+
+    m_Model->Load(GetParameterString("model"));
+    otbAppLogINFO("Model loaded");
+
+    // Classify
+    m_ClassificationFilter = DimensionalityReductionFilterType::New();
+    m_ClassificationFilter->SetModel(m_Model);
+    
+    FloatVectorImageType::Pointer outputImage = m_ClassificationFilter->GetOutput();
+
+    // Normalize input image if asked
+    if(IsParameterEnabled("imstat")  )
+      {
+      otbAppLogINFO("Input image normalization activated.");
+      // Normalize input image (optional)
+      StatisticsReader::Pointer  statisticsReader = StatisticsReader::New();
+      MeasurementType  meanMeasurementVector;
+      MeasurementType  stddevMeasurementVector;
+      m_Rescaler = RescalerType::New();
+      
+      // Load input image statistics
+      statisticsReader->SetFileName(GetParameterString("imstat"));
+      meanMeasurementVector   = statisticsReader->GetStatisticVectorByName("mean");
+      stddevMeasurementVector = statisticsReader->GetStatisticVectorByName("stddev");
+      otbAppLogINFO( "mean used: " << meanMeasurementVector );
+      otbAppLogINFO( "standard deviation used: " << stddevMeasurementVector );
+      if (meanMeasurementVector.Size() != nbFeatures)
+        {
+        otbAppLogFATAL("Wrong number of components in statistics file : "<<meanMeasurementVector.Size());
+        }
+        
+      // Rescale vector image
+      m_Rescaler->SetScale(stddevMeasurementVector);
+      m_Rescaler->SetShift(meanMeasurementVector);
+      m_Rescaler->SetInput(inImage);
+
+      m_ClassificationFilter->SetInput(m_Rescaler->GetOutput());
+      }
+    else
+      {
+      otbAppLogINFO("Input image normalization deactivated.");
+      m_ClassificationFilter->SetInput(inImage);
+      }
+
+
+    if(IsParameterEnabled("mask"))
+      {
+      otbAppLogINFO("Using input mask");
+      // Load mask image and cast into LabeledImageType
+      MaskImageType::Pointer inMask = GetParameterUInt8Image("mask");
+
+      m_ClassificationFilter->SetInputMask(inMask);
+      }
+
+    SetParameterOutputImage<FloatVectorImageType>("out", outputImage);
+
+  }
+
+  DimensionalityReductionFilterType::Pointer m_ClassificationFilter;
+  ModelPointerType m_Model;
+  RescalerType::Pointer m_Rescaler;
+  OutputRescalerType::Pointer m_OutRescaler;
+};
+
+
+}
+}
+
+OTB_APPLICATION_EXPORT(otb::Wrapper::CbDimensionalityReduction)
diff --git a/Modules/Applications/AppDimensionalityReduction/cbDimensionalityReductionTrainer.cxx b/Modules/Applications/AppDimensionalityReduction/cbDimensionalityReductionTrainer.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..4cb042427b0288b1da73b4149afbab4b152fecce
--- /dev/null
+++ b/Modules/Applications/AppDimensionalityReduction/cbDimensionalityReductionTrainer.cxx
@@ -0,0 +1,146 @@
+#include "otbWrapperApplication.h"
+#include "otbWrapperApplicationFactory.h"
+
+#include "otbOGRDataSourceWrapper.h"
+#include "otbOGRFeatureWrapper.h"
+
+#include "itkVariableLengthVector.h"
+
+#include "otbShiftScaleSampleListFilter.h"
+#include "otbStatisticsXMLFileReader.h"
+
+//#include "otbSharkUtils.h"
+
+#include <fstream> // write the model file
+
+#include "DimensionalityReductionModelFactory.h"
+#include "cbLearningApplicationBaseDR.h"
+
+
+namespace otb
+{
+namespace Wrapper
+{
+class CbDimensionalityReductionTrainer : public cbLearningApplicationBaseDR<float,float>
+{
+public:
+	typedef CbDimensionalityReductionTrainer Self;
+	typedef cbLearningApplicationBaseDR<float, float> Superclass;
+	typedef itk::SmartPointer<Self> Pointer;
+	typedef itk::SmartPointer<const Self> ConstPointer;
+	
+	itkNewMacro(Self);
+	itkTypeMacro(CbDimensionalityReductionTrainer, otb::Application);
+
+
+	typedef Superclass::SampleType              SampleType;
+	typedef Superclass::ListSampleType          ListSampleType;
+	typedef Superclass::SampleImageType         SampleImageType;
+	  
+	typedef float ValueType;
+	typedef itk::VariableLengthVector<ValueType> MeasurementType;
+
+	typedef otb::StatisticsXMLFileReader<SampleType> StatisticsReader;
+
+	typedef otb::Statistics::ShiftScaleSampleListFilter<ListSampleType, ListSampleType> ShiftScaleFilterType;
+	
+	typedef otb::DimensionalityReductionModelFactory<ValueType, ValueType>  ModelFactoryType;
+		
+private:
+	void DoInit()
+	{
+		SetName("CbDimensionalityReductionTrainer");
+		SetDescription("Trainer for the dimensionality reduction algorithms used in the cbDimensionalityReduction application.");
+	
+		AddParameter(ParameterType_Group, "io", "Input and output data");
+		SetParameterDescription("io", "This group of parameters allows setting input and output data.");
+
+		AddParameter(ParameterType_InputVectorData, "io.vd", "Input Vector Data");
+		SetParameterDescription("io.vd", "Input geometries used for training (note : all geometries from the layer will be used)");
+
+		AddParameter(ParameterType_OutputFilename, "io.out", "Output model");
+		SetParameterDescription("io.out", "Output file containing the model estimated (.txt format).");
+		
+	
+		AddParameter(ParameterType_InputFilename, "io.stats", "Input XML image statistics file");
+		MandatoryOff("io.stats");
+		SetParameterDescription("io.stats", "XML file containing mean and variance of each feature.");
+
+		AddParameter(ParameterType_StringList, "feat", "Field names to be calculated."); //
+		SetParameterDescription("feat","List of field names in the input vector data used as features for training."); //
+		
+		Superclass::DoInit();
+
+		AddRAMParameter();
+	}
+	
+	void DoUpdateParameters()
+	{
+	}
+	
+	void DoExecute()
+	{	
+
+		std::string shapefile = GetParameterString("io.vd");
+
+		otb::ogr::DataSource::Pointer source = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read);
+		otb::ogr::Layer layer = source->GetLayer(0);
+		ListSampleType::Pointer input = ListSampleType::New();
+		const int nbFeatures = GetParameterStringList("feat").size();
+
+		input->SetMeasurementVectorSize(nbFeatures);
+		otb::ogr::Layer::const_iterator it = layer.cbegin();
+		otb::ogr::Layer::const_iterator itEnd = layer.cend();
+		for( ; it!=itEnd ; ++it)
+		{
+			MeasurementType mv;
+			mv.SetSize(nbFeatures);
+			for(int idx=0; idx < nbFeatures; ++idx)
+			{
+				mv[idx] = (*it)[GetParameterStringList("feat")[idx]].GetValue<double>();
+			}
+			input->PushBack(mv);
+		}
+	
+		MeasurementType meanMeasurementVector;
+		MeasurementType stddevMeasurementVector;
+		
+		if (HasValue("io.stats") && IsParameterEnabled("io.stats"))
+		{
+			StatisticsReader::Pointer statisticsReader = StatisticsReader::New();
+			std::string XMLfile = GetParameterString("io.stats");
+			statisticsReader->SetFileName(XMLfile);
+			meanMeasurementVector = statisticsReader->GetStatisticVectorByName("mean");
+			stddevMeasurementVector = statisticsReader->GetStatisticVectorByName("stddev");
+		}
+		else
+		{
+			meanMeasurementVector.SetSize(nbFeatures);
+			meanMeasurementVector.Fill(0.);
+			stddevMeasurementVector.SetSize(nbFeatures);
+			stddevMeasurementVector.Fill(1.);
+		}
+    
+		ShiftScaleFilterType::Pointer trainingShiftScaleFilter = ShiftScaleFilterType::New();
+		trainingShiftScaleFilter->SetInput(input);
+		trainingShiftScaleFilter->SetShifts(meanMeasurementVector);
+		trainingShiftScaleFilter->SetScales(stddevMeasurementVector);
+		trainingShiftScaleFilter->Update();
+
+		ListSampleType::Pointer trainingListSample= trainingShiftScaleFilter->GetOutput();
+	
+	
+		this->Train(trainingListSample,GetParameterString("io.out"));
+	}
+
+
+	
+
+
+};
+
+
+}
+}
+
+OTB_APPLICATION_EXPORT(otb::Wrapper::CbDimensionalityReductionTrainer)
diff --git a/Modules/Applications/AppDimensionalityReduction/cbDimensionalityReductionVector.cxx b/Modules/Applications/AppDimensionalityReduction/cbDimensionalityReductionVector.cxx
new file mode 100644
index 0000000000000000000000000000000000000000..12e1307ad112a8e3a71252a352f029fa0dfd72ca
--- /dev/null
+++ b/Modules/Applications/AppDimensionalityReduction/cbDimensionalityReductionVector.cxx
@@ -0,0 +1,391 @@
+/*
+* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
+*
+* This file is part of Orfeo Toolbox
+*
+* https://www.orfeo-toolbox.org/
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+#include "otbWrapperApplication.h"
+#include "otbWrapperApplicationFactory.h"
+#include "otbOGRDataSourceWrapper.h"
+#include "otbOGRFeatureWrapper.h"
+#include "itkVariableLengthVector.h"
+#include "otbStatisticsXMLFileReader.h"
+#include "itkListSample.h"
+#include "otbShiftScaleSampleListFilter.h"
+#include "DimensionalityReductionModelFactory.h"
+//#include "DimensionalityReductionModel.h"
+#include <time.h>
+
+namespace otb
+{
+namespace Wrapper
+{
+	
+/** Utility function to negate std::isalnum */
+bool IsNotAlphaNum(char c)
+{
+return !std::isalnum(c);
+}
+
+class CbDimensionalityReductionVector : public Application
+{
+	public:
+	
+		/** Standard class typedefs. */
+		typedef CbDimensionalityReductionVector Self;
+		typedef Application Superclass;
+		typedef itk::SmartPointer<Self> Pointer;
+		typedef itk::SmartPointer<const Self> ConstPointer;
+		
+		/** Standard macro */
+		itkNewMacro(Self);
+		itkTypeMacro(CbDimensionalityReductionVector, Application);
+		
+		/** Filters typedef */
+		
+		typedef float 															ValueType;
+		typedef itk::VariableLengthVector<ValueType> 							InputSampleType;
+		typedef itk::Statistics::ListSample<InputSampleType> 					ListSampleType;
+		typedef MachineLearningModel<itk::VariableLengthVector<ValueType>, itk::VariableLengthVector<ValueType>>	DimensionalityReductionModelType;
+		typedef DimensionalityReductionModelFactory<ValueType,ValueType> 		DimensionalityReductionModelFactoryType;
+		typedef DimensionalityReductionModelType::Pointer 						ModelPointerType;
+		
+		/** Statistics Filters typedef */
+		typedef itk::VariableLengthVector<ValueType> 										MeasurementType;
+		typedef otb::StatisticsXMLFileReader<MeasurementType> 								StatisticsReader;
+		typedef otb::Statistics::ShiftScaleSampleListFilter<ListSampleType, ListSampleType> ShiftScaleFilterType;
+		~CbDimensionalityReductionVector() ITK_OVERRIDE
+		{
+		DimensionalityReductionModelFactoryType::CleanFactories();
+		}
+		
+	private:
+	
+		void DoInit() ITK_OVERRIDE
+		{
+		SetName("VectorDimensionalityReduction");
+		SetDescription("Performs dimensionality reduction of the input vector data according to a model file.");
+		SetDocName("Vector Dimensionality Reduction");
+		SetDocAuthors("OTB-Team");
+		SetDocLongDescription("This application performs a vector data dimensionality reduction based on a model file produced by the cbDimensionalityReductionTrainer application.");
+		SetDocSeeAlso("cbDimensionalityReductionTrainer");
+		AddDocTag(Tags::Learning);
+		
+		AddParameter(ParameterType_InputVectorData, "in", "Name of the input vector data");
+		SetParameterDescription("in","The input vector data to reduce.");
+		
+		AddParameter(ParameterType_InputFilename, "instat", "Statistics file");
+		SetParameterDescription("instat", "A XML file containing mean and standard deviation to center"
+		"and reduce samples before dimensionality reduction (produced by ComputeImagesStatistics application).");
+		MandatoryOff("instat");
+		
+		AddParameter(ParameterType_InputFilename, "model", "Model file");
+		SetParameterDescription("model", "A model file (produced by cbDimensionalityReduction application,");
+		
+		AddParameter(ParameterType_ListView, "feat", "Field names to be calculated."); //
+		SetParameterDescription("feat","List of field names in the input vector data used as features for training."); //
+		
+		AddParameter(ParameterType_StringList, "featout", "Field names to be calculated."); //
+		SetParameterDescription("featout","List of field names in the input vector data used as features for training."); //
+		
+		AddParameter(ParameterType_OutputFilename, "out", "Output vector data file containing the reduced vector");
+		SetParameterDescription("out","Output vector data file storing sample values (OGR format)."
+		"If not given, the input vector data file is updated.");
+		MandatoryOff("out");
+				
+		AddParameter(ParameterType_Int, "indim", "Dimension of the input vector");
+		SetParameterDescription("indim","Dimension of the whole input vector, this value is required if only a part of the bands contained in the vector are used."
+		"If not given, the dimension is deduced from the length of the 'feat' parameter");
+		MandatoryOff("indim");
+		
+		AddParameter(ParameterType_Int, "pcadim", "Principal component"); //
+		SetParameterDescription("pcadim","This optional parameter can be set to reduce the number of eignevectors used in the PCA model file."); //
+		MandatoryOff("pcadim");
+		
+		AddParameter(ParameterType_String, "mode", "Writting mode"); //
+		SetParameterString("mode","overwrite", false);
+		SetParameterDescription("mode","This parameter determines if the output file is overwritten or updated [overwrite/update]"); //
+
+		
+		// Doc example parameter settings
+		SetDocExampleParameterValue("in", "vectorData.shp");
+		SetDocExampleParameterValue("instat", "meanVar.xml");
+		SetDocExampleParameterValue("model", "model.txt");
+		SetDocExampleParameterValue("out", "vectorDataOut.shp");
+		SetDocExampleParameterValue("feat", "perimeter area width");
+		SetDocExampleParameterValue("featout", "perimeter area width");
+		//SetOfficialDocLink(); 
+		}
+		//   
+		void DoUpdateParameters() ITK_OVERRIDE
+		{
+			
+			if ( HasValue("in") )
+			{
+				
+				std::string shapefile = GetParameterString("in");
+				otb::ogr::DataSource::Pointer ogrDS;
+				OGRSpatialReference oSRS("");
+				std::vector<std::string> options;
+				ogrDS = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read);
+				otb::ogr::Layer layer = ogrDS->GetLayer(0);
+				OGRFeatureDefn &layerDefn = layer.GetLayerDefn();
+				ClearChoices("feat");
+				//ClearChoices("featout");
+				
+				for(int iField=0; iField< layerDefn.GetFieldCount(); iField++)
+				{
+					std::string item = layerDefn.GetFieldDefn(iField)->GetNameRef();
+					std::string key(item);
+					std::string::iterator end = std::remove_if( key.begin(), key.end(), IsNotAlphaNum );
+					std::transform( key.begin(), end, key.begin(), tolower );
+					/*
+					key.erase( std::remove_if(key.begin(),key.end(),IsNotAlphaNum), key.end());
+					std::transform(key.begin(), key.end(), key.begin(), tolower);*/
+					OGRFieldType fieldType = layerDefn.GetFieldDefn(iField)->GetType();
+				/*	if(fieldType == OFTInteger || ogr::version_proxy::IsOFTInteger64(fieldType) || fieldType == OFTReal)
+					{*/
+						//std::string tmpKey="feat."+key;
+						std::string tmpKey = "feat." + key.substr( 0, static_cast<unsigned long>( end - key.begin() ) );
+						AddChoice(tmpKey,item);
+					//}   // this is the same as in otbVectorClassifier, but it doesn't work
+				}
+				
+			}
+			
+		}
+		
+		void DoExecute() ITK_OVERRIDE
+		{
+			clock_t tic = clock();
+			
+			
+			std::string shapefile = GetParameterString("in");
+			otb::ogr::DataSource::Pointer source = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read);
+			otb::ogr::Layer layer = source->GetLayer(0);
+			ListSampleType::Pointer input = ListSampleType::New();
+			int nbFeatures = GetSelectedItems("feat").size();
+			
+			input->SetMeasurementVectorSize(nbFeatures);
+			otb::ogr::Layer::const_iterator it = layer.cbegin();
+			otb::ogr::Layer::const_iterator itEnd = layer.cend();
+			
+			for( ; it!=itEnd ; ++it)
+			{
+				MeasurementType mv;
+				mv.SetSize(nbFeatures);
+				
+				for(int idx=0; idx < nbFeatures; ++idx)
+				{
+					mv[idx] = static_cast<float>( (*it)[GetSelectedItems("feat")[idx]].GetValue<double>() );
+					
+				}
+				input->PushBack(mv);
+				
+			}
+				
+			
+			/** Statistics for shift/scale */
+			
+			MeasurementType meanMeasurementVector;
+			MeasurementType stddevMeasurementVector;
+			
+			if (HasValue("instat") && IsParameterEnabled("instat"))
+			{
+				StatisticsReader::Pointer statisticsReader = StatisticsReader::New();
+				std::string XMLfile = GetParameterString("instat");
+				statisticsReader->SetFileName(XMLfile);
+				meanMeasurementVector = statisticsReader->GetStatisticVectorByName("mean");
+				stddevMeasurementVector = statisticsReader->GetStatisticVectorByName("stddev");
+			}
+			else
+			{
+				meanMeasurementVector.SetSize(nbFeatures);
+				meanMeasurementVector.Fill(0.);
+				stddevMeasurementVector.SetSize(nbFeatures);
+				stddevMeasurementVector.Fill(1.);
+			}
+			
+			ShiftScaleFilterType::Pointer trainingShiftScaleFilter = ShiftScaleFilterType::New();
+			trainingShiftScaleFilter->SetInput(input);
+			trainingShiftScaleFilter->SetShifts(meanMeasurementVector);
+			trainingShiftScaleFilter->SetScales(stddevMeasurementVector);
+			trainingShiftScaleFilter->Update();
+			otbAppLogINFO("mean used: " << meanMeasurementVector);
+			otbAppLogINFO("standard deviation used: " << stddevMeasurementVector);
+			otbAppLogINFO("Loading model");
+			
+			
+			/** Read the model */
+
+			m_Model = DimensionalityReductionModelFactoryType::CreateDimensionalityReductionModel(GetParameterString("model"),
+			DimensionalityReductionModelFactoryType::ReadMode);
+			if (m_Model.IsNull())
+			{
+				otbAppLogFATAL(<< "Error when loading model " << GetParameterString("model") << " : unsupported model type");
+			}
+			if (HasValue("pcadim") && IsParameterEnabled("pcadim"))
+			{
+				int dimension = GetParameterInt("pcadim");
+				m_Model->SetDimension(dimension );
+			}
+
+			
+			m_Model->Load(GetParameterString("model"));
+			otbAppLogINFO("Model loaded");
+			
+			/** Perform Dimensionality Reduction */
+			
+			ListSampleType::Pointer listSample = trainingShiftScaleFilter->GetOutput();
+			ListSampleType::Pointer target = m_Model->PredictBatch(listSample);
+			
+			/** Create/Update Output Shape file */
+			
+			std::cout << GetParameterStringList("featout").size() << std::endl;
+			
+			ogr::DataSource::Pointer output;
+			ogr::DataSource::Pointer buffer = ogr::DataSource::New();
+			bool updateMode = false;
+				
+			int nbBands = nbFeatures;
+			if (HasValue("indim") && IsParameterEnabled("indim"))
+				{nbBands = GetParameterInt("indim");} 
+			
+			
+			if (IsParameterEnabled("out") && HasValue("out"))
+			{
+				// Create new OGRDataSource
+				if (GetParameterString("mode")=="overwrite")
+				{
+					output = ogr::DataSource::New(GetParameterString("out"), ogr::DataSource::Modes::Overwrite);
+					otb::ogr::Layer newLayer = output->CreateLayer(GetParameterString("out"),
+					 const_cast<OGRSpatialReference*>(layer.GetSpatialRef()),
+					 layer.GetGeomType());
+					// Copy existing fields
+					OGRFeatureDefn &inLayerDefn = layer.GetLayerDefn();
+					for (int k=0 ; k<inLayerDefn.GetFieldCount()-nbBands ; k++) // we don't copy the original bands 
+					{
+						OGRFieldDefn fieldDefn(inLayerDefn.GetFieldDefn(k));
+						newLayer.CreateField(fieldDefn);
+					}
+				}
+				else if (GetParameterString("mode")=="update")
+				{
+					//output = ogr::DataSource::New(GetParameterString("out"), ogr::DataSource::Modes::Update_LayerCreateOnly);
+					// Update mode
+					otb::ogr::DataSource::Pointer source_output = otb::ogr::DataSource::New(GetParameterString("out"), otb::ogr::DataSource::Modes::Read);
+					layer = source_output->GetLayer(0);
+					updateMode = true;
+					otbAppLogINFO("Update input vector data.");
+					
+					// fill temporary buffer for the transfer
+					otb::ogr::Layer inputLayer = layer;
+					layer = buffer->CopyLayer(inputLayer, std::string("Buffer"));
+					// close input data source 
+					source_output->Clear();
+					// Re-open input data source in update mode
+					output = otb::ogr::DataSource::New(GetParameterString("out"), otb::ogr::DataSource::Modes::Update_LayerUpdate);
+				
+				}
+				else
+				{
+					otbAppLogFATAL(<< "Error when creating the output file" << GetParameterString("mode") << " : unsupported writting mode type");
+				}
+				
+			
+			}
+			
+
+		
+			
+			otb::ogr::Layer outLayer = output->GetLayer(0);
+			
+			OGRErr errStart = outLayer.ogr().StartTransaction();
+			
+			if (errStart != OGRERR_NONE)
+			{
+				itkExceptionMacro(<< "Unable to start transaction for OGR layer " << outLayer.ogr().GetName() << ".");
+			}
+			
+			// Add the field of prediction in the output layer if field not exist
+			
+			for (int i=0; i<GetParameterStringList("featout").size() ;i++)
+			{
+				OGRFeatureDefn &layerDefn = outLayer.GetLayerDefn();
+				int idx = layerDefn.GetFieldIndex(GetParameterStringList("featout")[i].c_str());
+				
+				if (idx >= 0)
+				{
+					if (layerDefn.GetFieldDefn(idx)->GetType() != OFTReal)
+					itkExceptionMacro("Field name "<< GetParameterStringList("featout")[i] << " already exists with a different type!");
+				}
+				else
+				{
+					OGRFieldDefn predictedField(GetParameterStringList("featout")[i].c_str(), OFTReal);
+					ogr::FieldDefn predictedFieldDef(predictedField);
+					outLayer.CreateField(predictedFieldDef);
+				}
+			}
+			
+		
+			// Fill output layer
+			
+			unsigned int count=0;
+			auto classfieldname = GetParameterStringList("featout");
+			it = layer.cbegin();
+			itEnd = layer.cend();
+		
+			for( ; it!=itEnd ; ++it, ++count)
+			{
+				ogr::Feature dstFeature(outLayer.GetLayerDefn());
+			
+				dstFeature.SetFrom( *it , TRUE);
+				dstFeature.SetFID(it->GetFID());
+			
+				
+				
+				for (std::size_t i=0; i<classfieldname.size(); ++i){
+					dstFeature[classfieldname[i]].SetValue<double>(target->GetMeasurementVector(count)[i]);
+				}
+				if (updateMode)
+				{
+					outLayer.SetFeature(dstFeature);
+				}
+				else
+				{
+					outLayer.CreateFeature(dstFeature);
+				}
+			}
+			
+			if(outLayer.ogr().TestCapability("Transactions"))
+			{
+				const OGRErr errCommitX = outLayer.ogr().CommitTransaction();
+				if (errCommitX != OGRERR_NONE)
+				{
+					itkExceptionMacro(<< "Unable to commit transaction for OGR layer " << outLayer.ogr().GetName() << ".");
+				}
+			}
+			output->SyncToDisk();
+			clock_t toc = clock();
+			otbAppLogINFO( "Elapsed: "<< ((double)(toc - tic) / CLOCKS_PER_SEC)<<" seconds.");
+		}
+		
+		ModelPointerType m_Model;
+};
+}
+}
+OTB_APPLICATION_EXPORT(otb::Wrapper::CbDimensionalityReductionVector)
diff --git a/Modules/Learning/DimensionalityReduction/README.md b/Modules/Learning/DimensionalityReduction/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..a46b8fbf8af741f44690a747f973b08a5114e2d0
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/README.md
@@ -0,0 +1,25 @@
+This module contains a new dimensionality reduction framework for the Orfeo Toolbox.
+
+The framework is based on machine learning models: a dimensionality reduction algorithm
+can be trained and applied through the model class, in the same fashion as the machine
+learning models used for classification (supervised or unsupervised) and regression.
+
+The algorithms featured in the module are (27/06/2017):
+ - autoencoders and multi-layer autoencoders, with several regularization options
+ - PCA
+ - SOM
+
+Autoencoder and PCA models use the Shark ML library, while the SOM model is based on the SOM classes of OTB.
+
+More specifically, the module contains:
+
+ - Autoencoder, PCA and SOM models, with methods for training, serialization and prediction (i.e. reduction)
+ - A dimensionality reduction model factory and a factory for each model, which allow the user to create objects of the model classes
+ - An (OTB image-to-image) filter that can be used to perform dimensionality reduction on an image; this filter supports threading and streaming (a usage sketch is given at the end of this file)
+ - An application for training the models from a shapefile
+ - An application for applying a trained model to a shapefile
+ - An application for applying a trained model to an image (using the filter)
+ 
+			/!\ Work In Progress /!\
+ 
+
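+A minimal sketch of how these pieces are meant to fit together in C++ (type names,
+headers and the factory/filter calls follow the sources of this module; the reader
+and writer classes are the standard OTB IO classes and the file names are placeholders):
+
+```cpp
+#include "otbImage.h"
+#include "otbVectorImage.h"
+#include "otbImageFileReader.h"
+#include "otbImageFileWriter.h"
+#include "DimensionalityReductionModelFactory.h"
+#include "ImageDimensionalityReductionFilter.h"
+
+int main()
+{
+  typedef otb::VectorImage<float>                    ImageType;
+  typedef otb::Image<unsigned char>                  MaskImageType;
+  typedef otb::DimensionalityReductionModelFactory<float, float> FactoryType;
+  typedef otb::ImageDimensionalityReductionFilter<ImageType, ImageType, MaskImageType> FilterType;
+
+  // Load a model previously produced by the trainer application.
+  auto model = FactoryType::CreateDimensionalityReductionModel("model.txt", FactoryType::ReadMode);
+  model->Load("model.txt");
+
+  // Plug the model into the streaming reduction filter.
+  otb::ImageFileReader<ImageType>::Pointer reader = otb::ImageFileReader<ImageType>::New();
+  reader->SetFileName("input.tif");
+
+  FilterType::Pointer filter = FilterType::New();
+  filter->SetModel(model);
+  filter->SetInput(reader->GetOutput());
+
+  // Write the reduced image; streaming is handled by the writer.
+  otb::ImageFileWriter<ImageType>::Pointer writer = otb::ImageFileWriter<ImageType>::New();
+  writer->SetFileName("reduced.tif");
+  writer->SetInput(filter->GetOutput());
+  writer->Update();
+
+  return 0;
+}
+```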
diff --git a/Modules/Learning/DimensionalityReduction/include/AutoencoderModel.h b/Modules/Learning/DimensionalityReduction/include/AutoencoderModel.h
new file mode 100644
index 0000000000000000000000000000000000000000..a8cbe6c0f5608b237519d2571236d4b90decbc4f
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/AutoencoderModel.h
@@ -0,0 +1,139 @@
+#ifndef AutoencoderModel_h
+#define AutoencoderModel_h
+
+#include "otbMachineLearningModelTraits.h"
+#include "otbMachineLearningModel.h"
+#include <fstream>
+#include <shark/Algorithms/StoppingCriteria/AbstractStoppingCriterion.h>
+
+#include <shark/Models/FFNet.h>
+#include <shark/Models/Autoencoder.h>
+namespace otb
+{
+template <class TInputValue, class NeuronType>
+class ITK_EXPORT AutoencoderModel: public  MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>>   
+{
+
+public:
+	
+	typedef AutoencoderModel Self;
+	typedef MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>> Superclass;
+	typedef itk::SmartPointer<Self> Pointer;
+	typedef itk::SmartPointer<const Self> ConstPointer;
+
+	typedef typename Superclass::InputValueType 			InputValueType;
+	typedef typename Superclass::InputSampleType 			InputSampleType;
+	typedef typename Superclass::InputListSampleType 		InputListSampleType;
+	typedef typename InputListSampleType::Pointer 			ListSamplePointerType;
+	typedef typename Superclass::TargetValueType 			TargetValueType;
+	typedef typename Superclass::TargetSampleType 			TargetSampleType;
+	typedef typename Superclass::TargetListSampleType 		TargetListSampleType;
+
+	/// Confidence map related typedefs
+	
+	typedef typename Superclass::ConfidenceValueType  				ConfidenceValueType;
+	typedef typename Superclass::ConfidenceSampleType 				ConfidenceSampleType;
+	typedef typename Superclass::ConfidenceListSampleType      		ConfidenceListSampleType;
+
+	/// Neural network related typedefs
+	//typedef shark::Autoencoder<NeuronType,shark::LinearNeuron> OutAutoencoderType;
+	typedef shark::Autoencoder<NeuronType,shark::LinearNeuron> OutAutoencoderType;
+	typedef shark::Autoencoder<NeuronType,NeuronType> AutoencoderType;
+	typedef shark::FFNet<NeuronType,shark::LinearNeuron> NetworkType;
+	
+	
+	itkNewMacro(Self);
+	itkTypeMacro(AutoencoderModel, DimensionalityReductionModel);
+
+	//unsigned int GetDimension() {return m_NumberOfHiddenNeurons[m_net.size()-1];};  // Override the Dimensionality Reduction model method, it is used in the dimensionality reduction filter to set the output image size
+	itkGetMacro(NumberOfHiddenNeurons,itk::Array<unsigned int>);
+	itkSetMacro(NumberOfHiddenNeurons,itk::Array<unsigned int>);
+
+	itkGetMacro(NumberOfIterations,unsigned int);
+	itkSetMacro(NumberOfIterations,unsigned int);
+	
+	itkGetMacro(NumberOfIterationsFineTuning,unsigned int);
+	itkSetMacro(NumberOfIterationsFineTuning,unsigned int);
+	
+	itkGetMacro(Epsilon,double);
+	itkSetMacro(Epsilon,double);
+
+	itkGetMacro(InitFactor,double);
+	itkSetMacro(InitFactor,double);
+
+	itkGetMacro(Regularization,itk::Array<double>);
+	itkSetMacro(Regularization,itk::Array<double>);
+
+	itkGetMacro(Noise,itk::Array<double>);
+	itkSetMacro(Noise,itk::Array<double>);
+
+	itkGetMacro(Rho,itk::Array<double>);
+	itkSetMacro(Rho,itk::Array<double>);
+
+	itkGetMacro(Beta,itk::Array<double>);
+	itkSetMacro(Beta,itk::Array<double>);
+
+	itkGetMacro(WriteLearningCurve,bool);
+	itkSetMacro(WriteLearningCurve,bool);
+		
+	itkSetMacro(WriteWeights, bool);
+	itkGetMacro(WriteWeights, bool);
+	
+	itkGetMacro(LearningCurveFileName,std::string);
+	itkSetMacro(LearningCurveFileName,std::string);
+
+	bool CanReadFile(const std::string & filename);
+	bool CanWriteFile(const std::string & filename);
+
+	void Save(const std::string & filename, const std::string & name="")  ITK_OVERRIDE;
+	void Load(const std::string & filename, const std::string & name="")  ITK_OVERRIDE;
+
+	void Train() ITK_OVERRIDE;
+	
+	template <class T, class Autoencoder>
+	void TrainOneLayer(shark::AbstractStoppingCriterion<T> & criterion,Autoencoder &,unsigned int, unsigned int,double, double, shark::Data<shark::RealVector> &, std::ostream&);
+	
+	template <class T, class Autoencoder>
+	void TrainOneSparseLayer(shark::AbstractStoppingCriterion<T> & criterion,Autoencoder &, unsigned int, unsigned int,double, double,double, shark::Data<shark::RealVector> &, std::ostream&);
+	
+	template <class T>
+	void TrainNetwork(shark::AbstractStoppingCriterion<T> & criterion,double, double,double, shark::Data<shark::RealVector> &, std::ostream&);
+	
+protected:
+	AutoencoderModel();	
+	~AutoencoderModel() ITK_OVERRIDE;
+ 
+	virtual TargetSampleType DoPredict(const InputSampleType& input, ConfidenceValueType * quality = ITK_NULLPTR) const ITK_OVERRIDE;
+
+	virtual void DoPredictBatch(const InputListSampleType *, const unsigned int & startIndex, const unsigned int & size, TargetListSampleType *, ConfidenceListSampleType * quality = ITK_NULLPTR) const ITK_OVERRIDE;
+  
+private:
+	
+	/** Network attributes */
+	//std::vector<AutoencoderType> m_net;
+	NetworkType m_net;
+	itk::Array<unsigned int> m_NumberOfHiddenNeurons;
+	/** Training parameters */
+	unsigned int m_NumberOfIterations; // stop the training after a fixed number of iterations
+	unsigned int m_NumberOfIterationsFineTuning; // stop the fine tuning after a fixed number of iterations
+	double m_Epsilon; // Stops the training when the training error seems to converge
+	itk::Array<double> m_Regularization;  // L2 Regularization parameter
+	itk::Array<double> m_Noise;  // probability for an input to be set to 0 (denosing autoencoder)
+	itk::Array<double> m_Rho; // Sparsity parameter
+	itk::Array<double> m_Beta; // Sparsity regularization parameter
+	double m_InitFactor; // Weight initialization factor (the weights are initialized at m_InitFactor/sqrt(inputDimension))
+	
+	bool m_WriteLearningCurve; // Flag for writing the learning curve into a txt file
+	std::string m_LearningCurveFileName; // Name of the output learning curve printed after training
+	bool m_WriteWeights;
+};
+} // end namespace otb
+
+
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "AutoencoderModel.txx"
+#endif
+
+
+#endif
+
diff --git a/Modules/Learning/DimensionalityReduction/include/AutoencoderModel.txx b/Modules/Learning/DimensionalityReduction/include/AutoencoderModel.txx
new file mode 100644
index 0000000000000000000000000000000000000000..88c77dd5aee7ebbc90893fac7a9fe5a0ca55b4b2
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/AutoencoderModel.txx
@@ -0,0 +1,485 @@
+#ifndef AutoencoderModel_txx
+#define AutoencoderModel_txx
+
+#include <fstream>
+#include <shark/Data/Dataset.h>
+#include "itkMacro.h"
+#include "otbSharkUtils.h"
+
+//include train function
+#include <shark/ObjectiveFunctions/ErrorFunction.h>
+#include <shark/ObjectiveFunctions/SparseAutoencoderError.h>//the error function performing the regularisation of the hidden neurons
+
+#include <shark/Algorithms/GradientDescent/Rprop.h>// the RProp optimization algorithm
+#include <shark/ObjectiveFunctions/Loss/SquaredLoss.h> // squared loss used for regression
+#include <shark/ObjectiveFunctions/Regularizer.h> //L2 regulariziation
+#include <shark/Models/ImpulseNoiseModel.h> //noise source to corrupt the inputs
+#include <shark/Models/ConcatenatedModel.h>//to concatenate the noise with the model
+
+#include <shark/Algorithms/StoppingCriteria/MaxIterations.h> //A simple stopping criterion that stops after a fixed number of iterations
+#include <shark/Algorithms/StoppingCriteria/TrainingProgress.h> //Stops when the algorithm seems to converge, Tracks the progress of the training error over a period of time
+
+#include <shark/Algorithms/GradientDescent/SteepestDescent.h>
+
+namespace otb
+{
+
+template <class TInputValue, class NeuronType>
+AutoencoderModel<TInputValue,NeuronType>::AutoencoderModel()
+{
+	this->m_IsDoPredictBatchMultiThreaded = true;
+	this->m_WriteLearningCurve = false;
+}
+
+   
+template <class TInputValue, class NeuronType>
+AutoencoderModel<TInputValue,NeuronType>::~AutoencoderModel()
+{
+}
+
+template <class TInputValue, class NeuronType>
+void AutoencoderModel<TInputValue,NeuronType>::Train()
+{
+	std::vector<shark::RealVector> features;
+	Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
+	shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
+	shark::Data<shark::RealVector> inputSamples_copy = inputSamples;
+	
+	std::ofstream ofs;
+	if (this->m_WriteLearningCurve =true) 
+	{
+		ofs.open(m_LearningCurveFileName);
+		ofs << "learning curve" << std::endl; 
+	}
+	
+	/// Initialization of the feed forward neural network
+	std::vector<size_t> layers;
+	layers.push_back(shark::dataDimension(inputSamples));
+	for (unsigned int i = 0 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
+	{
+		layers.push_back(m_NumberOfHiddenNeurons[i]);
+	}
+	
+	for (unsigned int i = std::max(0,static_cast<int>(m_NumberOfHiddenNeurons.Size()-1)) ; i > 0; --i)
+	{
+		std::cout << i << std::endl;
+		layers.push_back(m_NumberOfHiddenNeurons[i-1]);
+	}
+	
+	layers.push_back(shark::dataDimension(inputSamples));
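+	// The loops above build a symmetric stack: the input dimension, the hidden
+	// layer sizes in order, then the same sizes mirrored back down to the input
+	// dimension, so m_net holds both the encoder and the decoder half.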
+	m_net.setStructure(layers);
+	shark::initRandomNormal(m_net,0.1);
+
+
+	/// Training of the first Autoencoder (first and last layer of the FF network)
+	if (m_Epsilon > 0){
+		shark::TrainingProgress<> criterion(5,m_Epsilon);
+		
+		OutAutoencoderType net;
+		if (m_Noise[0] != 0)   // Shark does not allow training a layer with both a sparsity term and a noisy input (shark::SparseAutoencoderError takes an autoencoder as input).
+		{
+			TrainOneLayer(criterion,net,0 , m_NumberOfHiddenNeurons[0],m_Noise[0],m_Regularization[0], inputSamples,ofs);
+		}
+		else
+		{
+			TrainOneSparseLayer( criterion, net , 0 , m_NumberOfHiddenNeurons[0],m_Rho[0],m_Beta[0],m_Regularization[0],inputSamples, ofs);
+		}
+		criterion.reset();
+	}
+	
+	else {
+		shark::MaxIterations<> criterion(m_NumberOfIterations);
+
+		OutAutoencoderType net;
+		if (m_Noise[0] != 0)   // Shark does not allow training a layer with both a sparsity term and a noisy input (shark::SparseAutoencoderError takes an autoencoder as input).
+		{
+			TrainOneLayer(criterion,net,0, m_NumberOfHiddenNeurons[0],m_Noise[0],m_Regularization[0], inputSamples, ofs);
+			std::cout << "mnoise " << m_Noise[0] << std::endl;
+		}
+		else
+		{
+			TrainOneSparseLayer(criterion, net, 0, m_NumberOfHiddenNeurons[0],m_Rho[0],m_Beta[0],m_Regularization[0], inputSamples, ofs);
+		}
+		criterion.reset();
+	}
+
+
+	/// Training of the other autoencoders 
+	if (m_Epsilon > 0){
+		shark::TrainingProgress<> criterion(5,m_Epsilon);
+		
+		for (unsigned int i = 1 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
+		{
+			AutoencoderType net;
+			if (m_Noise[i] != 0)   // Shark does not allow training a layer with both a sparsity term and a noisy input (shark::SparseAutoencoderError takes an autoencoder as input).
+			{
+				TrainOneLayer(criterion,net,i , m_NumberOfHiddenNeurons[i],m_Noise[i],m_Regularization[i], inputSamples,ofs);
+			}
+			else
+			{
+				TrainOneSparseLayer( criterion, net , i , m_NumberOfHiddenNeurons[i],m_Rho[i],m_Beta[i],m_Regularization[i],inputSamples, ofs);
+			}
+			criterion.reset();
+		}
+
+	}
+	
+	else {
+		shark::MaxIterations<> criterion(m_NumberOfIterations);
+		
+		for (unsigned int i = 1 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
+		{
+			AutoencoderType net;
+			if (m_Noise[i] != 0)   // Shark does not allow training a layer with both a sparsity term and a noisy input (shark::SparseAutoencoderError takes an autoencoder as input).
+			{
+				TrainOneLayer(criterion,net, i,m_NumberOfHiddenNeurons[i],m_Noise[i],m_Regularization[i], inputSamples, ofs);
+				std::cout << "mnoise " << m_Noise[i] << std::endl;
+			}
+			else
+			{
+				TrainOneSparseLayer(criterion, net, i, m_NumberOfHiddenNeurons[i],m_Rho[i],m_Beta[i],m_Regularization[i], inputSamples, ofs);
+			}
+			criterion.reset();
+		}
+		
+	}
+	if (m_NumberOfIterationsFineTuning > 0)
+	{
+		shark::MaxIterations<> criterion(m_NumberOfIterationsFineTuning);
+		TrainNetwork(criterion, m_Rho[0],m_Beta[0],m_Regularization[0], inputSamples_copy, ofs);
+	}		
+}
+
+template <class TInputValue, class NeuronType>
+template <class T, class Autoencoder>
+void AutoencoderModel<TInputValue,NeuronType>::TrainOneLayer(shark::AbstractStoppingCriterion<T> & criterion, Autoencoder & net,unsigned int layer_index, unsigned int nbneuron,double noise_strength,double regularization, shark::Data<shark::RealVector> &samples, std::ostream& File)
+{
+	//AutoencoderType net;
+	std::cout << "noise " <<  noise_strength << std::endl;
+	std::size_t inputs = dataDimension(samples);
+	net.setStructure(inputs, nbneuron);
+	initRandomUniform(net,-m_InitFactor*std::sqrt(1.0/inputs),m_InitFactor*std::sqrt(1.0/inputs));
+	
+	//initRandomUniform(net,-1,1);
+	shark::ImpulseNoiseModel noise(inputs,noise_strength,1.0); //set an input pixel with probability m_Noise to 0
+	shark::ConcatenatedModel<shark::RealVector,shark::RealVector> model = noise>> net;
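+	// Denoising setup: the noise model corrupts the inputs fed to the autoencoder,
+	// while the loss below still compares the reconstruction against the clean
+	// samples, so the layer learns to undo the corruption.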
+	shark::LabeledData<shark::RealVector,shark::RealVector> trainSet(samples,samples);//labels identical to inputs
+	shark::SquaredLoss<shark::RealVector> loss;
+	shark::ErrorFunction error(trainSet, &model, &loss);
+
+
+	shark::TwoNormRegularizer regularizer(error.numberOfVariables());
+	error.setRegularizer(regularization,&regularizer);
+
+	shark::IRpropPlusFull optimizer;
+	error.init();
+	optimizer.init(error);
+	
+	std::cout<<"error before training : " << optimizer.solution().value<<std::endl;
+	if (this->m_WriteLearningCurve =true) 
+	{
+		File << "end layer" << std::endl;
+	}
+	
+	unsigned int i=0;
+	do{
+		i++;
+		optimizer.step(error);
+		if (this->m_WriteLearningCurve =true) 
+		{	
+		File << optimizer.solution().value << std::endl;
+		}
+		std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<  std::endl ;
+	
+	} while( !criterion.stop( optimizer.solution() ) );
+	
+	net.setParameterVector(optimizer.solution().point);
+	//m_net.push_back(net);
+	m_net.setLayer(layer_index,net.encoderMatrix(),net.hiddenBias());  // Copy the encoder in the FF neural network
+	m_net.setLayer( m_NumberOfHiddenNeurons.Size()*2 - 1 - layer_index,net.decoderMatrix(),net.outputBias()); // Copy the decoder in the FF neural network
+	samples = net.encode(samples);
+}
+
+
+template <class TInputValue, class NeuronType>
+template <class T, class Autoencoder>
+void AutoencoderModel<TInputValue,NeuronType>::TrainOneSparseLayer(shark::AbstractStoppingCriterion<T> & criterion, Autoencoder & net, unsigned int layer_index, unsigned int nbneuron,double rho,double beta, double regularization, shark::Data<shark::RealVector> &samples, std::ostream& File)
+{
+	//AutoencoderType net;
+	std::size_t inputs = dataDimension(samples);
+	net.setStructure(inputs, nbneuron);
+	
+	shark::initRandomUniform(net,-m_InitFactor*std::sqrt(1.0/inputs),m_InitFactor*std::sqrt(1.0/inputs));
+	
+	/*   Idea : set the initials value for the output weights higher than the input weights
+	auto weights = net.parameterVector();
+	
+	for(unsigned int i=net.inputSize()*net.numberOfHiddenNeurons(); i< (net.inputSize()+net.outputSize())*net.numberOfHiddenNeurons(); i++ )
+	{
+		 weights(i) *= 100;
+		 std::cout << weights(i) << std::endl;
+	}
+	net.setParameterVector(weights);
+	std::cout << "dec" << net.decoderMatrix()<< std::endl;
+	*/
+	
+	
+	
+	//std::cout << "initial seed" << net.parameterVector() << std::endl;
+	//initRandomUniform(net,-1,1);
+	shark::LabeledData<shark::RealVector,shark::RealVector> trainSet(samples,samples);//labels identical to inputs
+	shark::SquaredLoss<shark::RealVector> loss;
+	shark::SparseAutoencoderError error(trainSet,&net, &loss, rho, beta);
+	
+	shark::TwoNormRegularizer regularizer(error.numberOfVariables());
+	error.setRegularizer(regularization,&regularizer);
+	std::cout << samples.element(0) << std::endl;
+	/*shark::SteepestDescent optimizer;
+	error.init();
+	optimizer.init(error);
+	optimizer.setLearningRate(0.01);
+	std::cout << optimizer.learningRate() << std::endl;
+	*/
+	shark::IRpropPlusFull optimizer;
+	error.init();
+	optimizer.init(error);
+	
+	
+	std::cout<<"error before training : " << optimizer.solution().value<<std::endl;
+	unsigned int i=0;
+	do{
+		
+		i++;
+		optimizer.step(error);
+		std::cout<<"error after " << i << "iterations : " << optimizer.solution().value <<std::endl;
+		if (this->m_WriteLearningCurve =true) 
+		{	
+		File << optimizer.solution().value << std::endl;
+		}
+	} while( !criterion.stop( optimizer.solution() ) );
+	if (this->m_WriteLearningCurve =true) 
+	{
+		File << "end layer" << std::endl;
+	}
+	net.setParameterVector(optimizer.solution().point);
+	//m_net.push_back(net);
+	m_net.setLayer(layer_index,net.encoderMatrix(),net.hiddenBias());  // Copy the encoder in the FF neural network
+	m_net.setLayer( m_NumberOfHiddenNeurons.Size()*2 - 1 - layer_index,net.decoderMatrix(),net.outputBias()); // Copy the decoder in the FF neural network
+	samples = net.encode(samples);
+	
+}
+
+
+template <class TInputValue, class NeuronType>
+template <class T>
+void AutoencoderModel<TInputValue,NeuronType>::TrainNetwork(shark::AbstractStoppingCriterion<T> & criterion,double rho,double beta, double regularization, shark::Data<shark::RealVector> &samples, std::ostream& File)
+{
+	
+	shark::LabeledData<shark::RealVector,shark::RealVector> trainSet(samples,samples);//labels identical to inputs
+	shark::SquaredLoss<shark::RealVector> loss;
+	
+	shark::ErrorFunction error(trainSet, &m_net, &loss);
+	shark::TwoNormRegularizer regularizer(error.numberOfVariables());
+	error.setRegularizer(regularization,&regularizer);
+
+	shark::IRpropPlusFull optimizer;
+	error.init();
+	optimizer.init(error);
+	std::cout<<"error before training : " << optimizer.solution().value<<std::endl;
+	unsigned int i=0;
+	while( !criterion.stop( optimizer.solution() ) )
+	{
+		i++;
+		optimizer.step(error);
+		std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
+		if (this->m_WriteLearningCurve =true) 
+		{	
+			File << optimizer.solution().value << std::endl;
+		}
+	} 
+	//std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
+}
+
+
+
+template <class TInputValue, class NeuronType>
+bool AutoencoderModel<TInputValue,NeuronType>::CanReadFile(const std::string & filename)
+{
+	try
+	{
+		this->Load(filename);
+		m_net.name();
+	}
+	catch(...)
+	{
+	return false;
+	}
+	return true;
+}
+
+
+template <class TInputValue, class NeuronType>
+bool AutoencoderModel<TInputValue,NeuronType>::CanWriteFile(const std::string & filename)
+{
+	return true;
+}
+
+template <class TInputValue, class NeuronType>
+void AutoencoderModel<TInputValue,NeuronType>::Save(const std::string & filename, const std::string & name)
+{
+	std::cout << "saving model ..." << std::endl;
+	std::ofstream ofs(filename);
+	ofs << m_net.name() << std::endl; // the first line of the model file contains a key
+	boost::archive::polymorphic_text_oarchive oa(ofs);
+	//m_net.write(oa);
+	oa << m_net;
+	ofs.close();
+	
+	
+	if (this->m_WriteWeights == true)     // output the map vectors in a txt file
+	{
+		std::ofstream otxt(filename+".txt");
+		
+		
+		for (unsigned int i = 0 ; i < m_net.layerMatrices().size(); ++i)
+		{
+			otxt << "layer " << i << std::endl;
+			otxt << m_net.layerMatrix(i) << std::endl;
+			otxt << m_net.bias(i) << std::endl;
+			otxt << std::endl;
+		}
+		
+	/*	
+		std::vector<shark::RealVector> features;
+	
+		shark::SquaredLoss<shark::RealVector> loss;
+		Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
+		shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
+		shark::Data<shark::RealVector> outputSamples = inputSamples;
+		
+		for (unsigned int i = 0 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
+		{
+			outputSamples = m_net[i].encode(outputSamples);
+		}
+		
+		for (unsigned int i = 0 ; i < m_NumberOfHiddenNeurons.Size(); ++i)
+		{
+			outputSamples = m_net[m_NumberOfHiddenNeurons.Size()-i-1].decode(outputSamples);   // We decode the data starting from the smallest layer 
+		}
+		otxt << "Reconstruction error : " << loss.eval(inputSamples,outputSamples) << std::endl; // the mean squared error is returned
+		std::cout << "Reconstruction error : " << loss.eval(inputSamples,outputSamples) << std::endl; // the mean squared error is returned
+		
+		std::cout << "in" << inputSamples.element(0) << std::endl;
+		std::cout << "out" << outputSamples.element(0) << std::endl;
+		
+		otxt.close();
+		
+		*/
+	}
+	
+	
+	
+}
+
+template <class TInputValue, class NeuronType>
+void AutoencoderModel<TInputValue,NeuronType>::Load(const std::string & filename, const std::string & name)
+{
+	
+	NetworkType net;
+	std::ifstream ifs(filename);
+	char autoencoder[256];
+	ifs.getline(autoencoder,256); 
+	std::string autoencoderstr(autoencoder);
+	
+	std::cout << autoencoderstr << std::endl;
+	if (autoencoderstr != net.name()){
+		itkExceptionMacro(<< "Error opening " << filename.c_str() );
+    }
+	boost::archive::polymorphic_text_iarchive ia(ifs);
+	//m_net.read(ia);
+	ia >> m_net;
+	ifs.close();
+	
+	// This gives us the dimension if we keep the encoder and decoder
+	size_t feature_layer_index = m_net.layerMatrices().size()/2; 
+	this->m_Dimension = m_net.layerMatrix(feature_layer_index).size1(); // number of neurons in the feature layer (first dimension of the first decoder weight matrix)
+	std::cout << this->m_Dimension << std::endl;
+	
+	
+	
+	/* This might not be needed
+	int number_of_layers = m_net.layerMatrices().size()/2;
+	std::cout << "number of layers" << number_of_layers << std::endl;
+	m_NumberOfHiddenNeurons.SetSize(number_of_layers);
+	for (int i=0; i<number_of_layers; i++){ 
+		m_NumberOfHiddenNeurons[i] = m_net[i].numberOfHiddenNeurons();
+	}
+	this->m_Dimension = m_NumberOfHiddenNeurons[number_of_layers-1];
+	*/
+}
+
+
+template <class TInputValue, class NeuronType>
+typename AutoencoderModel<TInputValue,NeuronType>::TargetSampleType
+AutoencoderModel<TInputValue,NeuronType>::DoPredict(const InputSampleType & value, ConfidenceValueType * quality) const
+{  
+	
+	shark::RealVector samples(value.Size());
+	for(size_t i = 0; i < value.Size();i++)
+    {
+		samples[i]=value[i];
+    }
+    
+    std::vector<shark::RealVector> features;
+    features.push_back(samples);
+   
+    shark::Data<shark::RealVector> data = shark::createDataFromRange(features);
+    
+    data = m_net.evalLayer( m_net.layerMatrices().size()/2-1 ,data); // features layer for a network containing the encoder and decoder part
+    /*
+	for (int i=0; i<m_net.size(); i++){ // loop over all autoencoders in m_net
+		data = m_net[i].encode(data);    
+	}*/
+    TargetSampleType target;
+    target.SetSize(this->m_Dimension);
+	
+	for(unsigned int a = 0; a < this->m_Dimension; ++a){
+		target[a]=data.element(0)[a];
+	}
+	return target;
+	
+}
+
+
+template <class TInputValue, class NeuronType>
+void AutoencoderModel<TInputValue,NeuronType>
+::DoPredictBatch(const InputListSampleType *input, const unsigned int & startIndex, const unsigned int & size, TargetListSampleType * targets, ConfidenceListSampleType * quality) const
+{
+	
+	std::vector<shark::RealVector> features;
+	Shark::ListSampleRangeToSharkVector(input, features,startIndex,size);
+	shark::Data<shark::RealVector> data = shark::createDataFromRange(features);
+	TargetSampleType target;
+	/*
+	for (auto net :m_net ){ // loop over all autoencoders in m_net
+		data = net.encode(data);    
+	}
+	*/
+	data = m_net.evalLayer( m_net.layerMatrices().size()/2-1 ,data);   // features layer for a network containing the encoder and decoder part
+	
+	unsigned int id = startIndex;
+	target.SetSize(this->m_Dimension);
+	
+	for(const auto& p : data.elements())
+	{
+		for(unsigned int a = 0; a < this->m_Dimension; ++a){
+			target[a]=p[a];
+		}
+		targets->SetMeasurementVector(id,target);
+		++id;	
+    }
+    
+}
+
+} // namespace otb
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/AutoencoderModelFactory.h b/Modules/Learning/DimensionalityReduction/include/AutoencoderModelFactory.h
new file mode 100644
index 0000000000000000000000000000000000000000..c9c993b57e7e258e4671777ae2b0e96bee4c4b34
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/AutoencoderModelFactory.h
@@ -0,0 +1,70 @@
+#ifndef AutoencoderModelFactory_h
+#define AutoencoderModelFactory_h
+
+
+#include <shark/Models/TiedAutoencoder.h>
+#include <shark/Models/Autoencoder.h>
+#include "itkObjectFactoryBase.h"
+#include "itkImageIOBase.h"
+
+namespace otb
+{
+	
+template <class TInputValue, class TTargetValue, class NeuronType>
+class ITK_EXPORT AutoencoderModelFactoryBase : public itk::ObjectFactoryBase
+{
+public:
+  /** Standard class typedefs. */
+  typedef AutoencoderModelFactoryBase   Self;
+  typedef itk::ObjectFactoryBase        Superclass;
+  typedef itk::SmartPointer<Self>       Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Class methods used to interface with the registered factories. */
+  const char* GetITKSourceVersion(void) const ITK_OVERRIDE;
+  const char* GetDescription(void) const ITK_OVERRIDE;
+
+  /** Method for class instantiation. */
+  itkFactorylessNewMacro(Self);
+
+  /** Run-time type information (and related methods). */
+  itkTypeMacro(AutoencoderModelFactoryBase, itk::ObjectFactoryBase);
+
+  /** Register one factory of this type  */
+  static void RegisterOneFactory(void)
+  {
+    Pointer AEFactory = AutoencoderModelFactoryBase::New();
+    itk::ObjectFactoryBase::RegisterFactory(AEFactory);
+  }
+
+protected:
+  AutoencoderModelFactoryBase();
+  ~AutoencoderModelFactoryBase() ITK_OVERRIDE;
+
+private:
+  AutoencoderModelFactoryBase(const Self &); //purposely not implemented
+  void operator =(const Self&); //purposely not implemented
+
+};
+
+
+
+/*
+template <class TInputValue, class TTargetValue>
+using AutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::Autoencoder< shark::TanhNeuron, shark::LinearNeuron>>  ;
+
+
+template <class TInputValue, class TTargetValue>
+using TiedAutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::TiedAutoencoder< shark::TanhNeuron, shark::LinearNeuron>>  ;
+*/
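+
+// Note: the concrete aliases above are kept commented out here; the factory alias
+// actually used (AutoencoderModelFactory, based on shark::LogisticNeuron) is defined
+// in DimensionalityReductionModelFactory.txx.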
+
+} //namespace otb
+
+
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "AutoencoderModelFactory.txx"
+#endif
+
+#endif
+
+
diff --git a/Modules/Learning/DimensionalityReduction/include/AutoencoderModelFactory.txx b/Modules/Learning/DimensionalityReduction/include/AutoencoderModelFactory.txx
new file mode 100644
index 0000000000000000000000000000000000000000..47b4669ab4ad2761bd3174ac71b5d96d74b9becf
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/AutoencoderModelFactory.txx
@@ -0,0 +1,64 @@
+/*=========================================================================
+
+  Program:   ORFEO Toolbox
+  Language:  C++
+  Date:      $Date$
+  Version:   $Revision$
+
+
+  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+  See OTBCopyright.txt for details.
+
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notices for more information.
+
+=========================================================================*/
+#ifndef AutoencoderModelFactory_txx
+#define AutoencoderModelFactory_txx
+
+
+#include "AutoencoderModelFactory.h"
+
+#include "itkCreateObjectFunction.h"
+#include "AutoencoderModel.h"
+#include "itkVersion.h"
+
+namespace otb
+{
+template <class TInputValue, class TOutputValue, class NeuronType>
+AutoencoderModelFactoryBase<TInputValue,TOutputValue, NeuronType>::AutoencoderModelFactoryBase()
+{
+
+  std::string classOverride = std::string("DimensionalityReductionModel");
+  std::string subclass = std::string("AutoencoderModel");
+
+  this->RegisterOverride(classOverride.c_str(),
+                         subclass.c_str(),
+                         "Shark AE ML Model",
+                         1,
+                         // itk::CreateObjectFunction<AutoencoderModel<TInputValue,TOutputValue> >::New());
+                         itk::CreateObjectFunction<AutoencoderModel<TInputValue,NeuronType> >::New());
+}
+
+template <class TInputValue, class TOutputValue, class NeuronType>
+AutoencoderModelFactoryBase<TInputValue,TOutputValue, NeuronType>::~AutoencoderModelFactoryBase()
+{
+}
+
+template <class TInputValue, class TOutputValue, class NeuronType>
+const char* AutoencoderModelFactoryBase<TInputValue,TOutputValue, NeuronType>::GetITKSourceVersion(void) const
+{
+  return ITK_SOURCE_VERSION;
+}
+
+template <class TInputValue, class TOutputValue, class NeuronType>
+const char* AutoencoderModelFactoryBase<TInputValue,TOutputValue, NeuronType>::GetDescription() const
+{
+  return "Autoencoder model factory";
+}
+
+} // end namespace otb
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/DimensionalityReductionModelFactory.h b/Modules/Learning/DimensionalityReduction/include/DimensionalityReductionModelFactory.h
new file mode 100644
index 0000000000000000000000000000000000000000..16fa0f416d51a0e1b39465664a698d40cb1d4069
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/DimensionalityReductionModelFactory.h
@@ -0,0 +1,84 @@
+/*=========================================================================
+
+  Program:   ORFEO Toolbox
+  Language:  C++
+  Date:      $Date$
+  Version:   $Revision$
+
+
+  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+  See OTBCopyright.txt for details.
+
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notices for more information.
+
+=========================================================================*/
+#ifndef DimensionalityReductionModelFactory_h
+#define DimensionalityReductionModelFactory_h
+
+//#include "DimensionalityReductionModel.h"
+#include "otbMachineLearningModelFactoryBase.h"
+ 
+#include "otbMachineLearningModel.h" 
+
+namespace otb
+{
+/** \class DimensionalityReductionModelFactory
+ * \brief Creation of dimensionality reduction model instances using the object factory mechanism.
+ *
+ * \ingroup OTBSupervised
+ */
+template <class TInputValue, class TOutputValue>
+class DimensionalityReductionModelFactory : public MachineLearningModelFactoryBase
+{
+public:
+  /** Standard class typedefs. */
+  typedef DimensionalityReductionModelFactory                Self;
+  typedef itk::Object           Superclass;
+  typedef itk::SmartPointer<Self>       Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Class Methods used to interface with the registered factories */
+
+  /** Run-time type information (and related methods). */
+  itkTypeMacro(DimensionalityReductionModelFactory, itk::Object);
+
+  /** Convenient typedefs. */
+  typedef otb::MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TOutputValue>> DimensionalityReductionModelType;
+  typedef typename DimensionalityReductionModelType::Pointer DimensionalityReductionModelTypePointer;
+
+  /** Mode in which the file is intended to be used */
+  typedef enum { ReadMode, WriteMode } FileModeType;
+
+
+  /** Create the appropriate DimensionalityReductionModel depending on the particulars of the file. */
+  static DimensionalityReductionModelTypePointer CreateDimensionalityReductionModel(const std::string& path, FileModeType mode);
+
+  static void CleanFactories();
+
+protected:
+  DimensionalityReductionModelFactory();
+  ~DimensionalityReductionModelFactory() ITK_OVERRIDE;
+
+private:
+  DimensionalityReductionModelFactory(const Self &); //purposely not implemented
+  void operator =(const Self&); //purposely not implemented
+
+  /** Register Built-in factories */
+  static void RegisterBuiltInFactories();
+
+  /** Register a single factory, ensuring it has not been registered
+    * twice */
+  static void RegisterFactory(itk::ObjectFactoryBase * factory);
+
+};
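+
+// Usage sketch (illustrative only, not part of this patch):
+//
+//   typedef otb::DimensionalityReductionModelFactory<float, float> FactoryType;
+//   FactoryType::DimensionalityReductionModelTypePointer model =
+//     FactoryType::CreateDimensionalityReductionModel(path, FactoryType::ReadMode);
+//   if (model.IsNotNull())
+//     {
+//     model->Load(path);
+//     }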
+
+} // end namespace otb
+
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "DimensionalityReductionModelFactory.txx"
+#endif
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/DimensionalityReductionModelFactory.txx b/Modules/Learning/DimensionalityReduction/include/DimensionalityReductionModelFactory.txx
new file mode 100644
index 0000000000000000000000000000000000000000..130dd1362663abec152086e566c76038420246a6
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/DimensionalityReductionModelFactory.txx
@@ -0,0 +1,233 @@
+/*=========================================================================
+
+  Program:   ORFEO Toolbox
+  Language:  C++
+  Date:      $Date$
+  Version:   $Revision$
+
+
+  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+  See OTBCopyright.txt for details.
+
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notices for more information.
+
+=========================================================================*/
+#ifndef DimensionalityReductionModelFactory_txx
+#define DimensionalityReductionModelFactory_txx
+
+#include "DimensionalityReductionModelFactory.h"
+#include "otbConfigure.h"
+
+#include "SOMModelFactory.h"
+
+#ifdef OTB_USE_SHARK
+#include "AutoencoderModelFactory.h"
+#include "PCAModelFactory.h"
+#endif
+
+#include "itkMutexLockHolder.h"
+
+
+namespace otb
+{
+
+/*
+template <class TInputValue, class TTargetValue>
+// using AutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::Autoencoder<shark::TanhNeuron, shark::LinearNeuron>>  ;
+using AutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::Autoencoder<shark::TanhNeuron, shark::TanhNeuron>>  ;
+
+
+template <class TInputValue, class TTargetValue>
+// using TiedAutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::TiedAutoencoder< shark::TanhNeuron, shark::LinearNeuron>>  ;
+using TiedAutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::TiedAutoencoder< shark::TanhNeuron, shark::TanhNeuron>>  ;
+*/
+
+template <class TInputValue, class TTargetValue>
+using AutoencoderModelFactory = AutoencoderModelFactoryBase<TInputValue, TTargetValue, shark::LogisticNeuron>  ;
+
+
+template <class TInputValue, class TTargetValue>
+using SOM2DModelFactory = SOMModelFactory<TInputValue, TTargetValue, 2>  ;
+
+template <class TInputValue, class TTargetValue>
+using SOM3DModelFactory = SOMModelFactory<TInputValue, TTargetValue, 3>  ;
+
+template <class TInputValue, class TTargetValue>
+using SOM4DModelFactory = SOMModelFactory<TInputValue, TTargetValue, 4>  ;
+
+template <class TInputValue, class TTargetValue>
+using SOM5DModelFactory = SOMModelFactory<TInputValue, TTargetValue, 5>  ;
+
+
+template <class TInputValue, class TOutputValue>
+typename MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TOutputValue>>::Pointer
+DimensionalityReductionModelFactory<TInputValue,TOutputValue>
+::CreateDimensionalityReductionModel(const std::string& path, FileModeType mode)
+{
+  RegisterBuiltInFactories();
+
+  std::list<DimensionalityReductionModelTypePointer> possibleDimensionalityReductionModel;
+  std::list<LightObject::Pointer> allobjects =
+    itk::ObjectFactoryBase::CreateAllInstance("DimensionalityReductionModel");
+ 
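+  // Try to down-cast every object registered under the "DimensionalityReductionModel"
+  // key to the expected model type; incompatible factories are reported and skipped.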
+  for(std::list<LightObject::Pointer>::iterator i = allobjects.begin();
+      i != allobjects.end(); ++i)
+    {
+    MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TOutputValue>> * io = dynamic_cast<MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TOutputValue>>*>(i->GetPointer());
+    if(io)
+      {
+      possibleDimensionalityReductionModel.push_back(io);
+      }
+    else
+      {
+      std::cerr << "Error: DimensionalityReductionModel factory did not return a DimensionalityReductionModel: "
+                << (*i)->GetNameOfClass()
+                << std::endl;
+      }
+    }
+  
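+  // Return the first candidate model that can actually read (or write) the given file.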
+  for(typename std::list<DimensionalityReductionModelTypePointer>::iterator k = possibleDimensionalityReductionModel.begin();
+      k != possibleDimensionalityReductionModel.end(); ++k)
+    {
+    if( mode == ReadMode )
+      {
+      if((*k)->CanReadFile(path))
+        {
+        return *k;
+        }
+      }
+    else if( mode == WriteMode )
+      {
+      if((*k)->CanWriteFile(path))
+        {
+        return *k;
+        }
+
+      }
+    }
+  return ITK_NULLPTR;
+}
+
+template <class TInputValue, class TOutputValue>
+void
+DimensionalityReductionModelFactory<TInputValue,TOutputValue>
+::RegisterBuiltInFactories()
+{
+  itk::MutexLockHolder<itk::SimpleMutexLock> lockHolder(mutex);
+  
+  
+
+  RegisterFactory(SOM2DModelFactory<TInputValue,TOutputValue>::New());
+  RegisterFactory(SOM3DModelFactory<TInputValue,TOutputValue>::New());
+  RegisterFactory(SOM4DModelFactory<TInputValue,TOutputValue>::New());
+  RegisterFactory(SOM5DModelFactory<TInputValue,TOutputValue>::New());
+  
+#ifdef OTB_USE_SHARK
+  RegisterFactory(PCAModelFactory<TInputValue,TOutputValue>::New());
+  RegisterFactory(AutoencoderModelFactory<TInputValue,TOutputValue>::New());
+ // RegisterFactory(TiedAutoencoderModelFactory<TInputValue,TOutputValue>::New());
+#endif
+  
+}
+
+template <class TInputValue, class TOutputValue>
+void
+DimensionalityReductionModelFactory<TInputValue,TOutputValue>
+::RegisterFactory(itk::ObjectFactoryBase * factory)
+{
+  // Unregister any previously registered factory of the same class
+  // This may be more costly, but using a static bool flag is not an option
+  // because it triggers a linker (ld) error.
+  itk::ObjectFactoryBase::UnRegisterFactory(factory);
+  itk::ObjectFactoryBase::RegisterFactory(factory);
+}
+
+template <class TInputValue, class TOutputValue>
+void
+DimensionalityReductionModelFactory<TInputValue,TOutputValue>
+::CleanFactories()
+{
+  itk::MutexLockHolder<itk::SimpleMutexLock> lockHolder(mutex);
+
+  std::list<itk::ObjectFactoryBase*> factories = itk::ObjectFactoryBase::GetRegisteredFactories();
+  std::list<itk::ObjectFactoryBase*>::iterator itFac;
+
+  for (itFac = factories.begin(); itFac != factories.end() ; ++itFac)
+    {
+
+    // SOM factories
+    SOM5DModelFactory<TInputValue,TOutputValue> *som5dFactory =
+      dynamic_cast<SOM5DModelFactory<TInputValue,TOutputValue> *>(*itFac);
+    if (som5dFactory)
+      {
+      itk::ObjectFactoryBase::UnRegisterFactory(som5dFactory);
+      continue;
+      }
+    
+    SOM4DModelFactory<TInputValue,TOutputValue> *som4dFactory =
+      dynamic_cast<SOM4DModelFactory<TInputValue,TOutputValue> *>(*itFac);
+    if (som4dFactory)
+      {
+      itk::ObjectFactoryBase::UnRegisterFactory(som4dFactory);
+      continue;
+      }
+      
+    SOM3DModelFactory<TInputValue,TOutputValue> *som3dFactory =
+      dynamic_cast<SOM3DModelFactory<TInputValue,TOutputValue> *>(*itFac);
+    if (som3dFactory)
+      {
+      itk::ObjectFactoryBase::UnRegisterFactory(som3dFactory);
+      continue;
+      }
+      
+    SOM2DModelFactory<TInputValue,TOutputValue> *som2dFactory =
+      dynamic_cast<SOM2DModelFactory<TInputValue,TOutputValue> *>(*itFac);
+    if (som2dFactory)
+      {
+      itk::ObjectFactoryBase::UnRegisterFactory(som2dFactory);
+      continue;
+      }
+      
+#ifdef OTB_USE_SHARK
+    // Autoencoder
+    AutoencoderModelFactory<TInputValue,TOutputValue> *aeFactory =
+      dynamic_cast<AutoencoderModelFactory<TInputValue,TOutputValue> *>(*itFac);
+    if (aeFactory)
+      {
+      itk::ObjectFactoryBase::UnRegisterFactory(aeFactory);
+      continue;
+      }
+    
+    /*
+    TiedAutoencoderModelFactory<TInputValue,TOutputValue> *taeFactory =
+      dynamic_cast<TiedAutoencoderModelFactory<TInputValue,TOutputValue> *>(*itFac);
+    if (taeFactory)
+      {
+      itk::ObjectFactoryBase::UnRegisterFactory(taeFactory);
+      continue;
+      }
+    */
+    // PCA  
+    PCAModelFactory<TInputValue,TOutputValue> *pcaFactory =
+      dynamic_cast<PCAModelFactory<TInputValue,TOutputValue> *>(*itFac);
+    if (pcaFactory)
+      {
+      itk::ObjectFactoryBase::UnRegisterFactory(pcaFactory);
+      continue;
+      }
+#endif
+
+    }
+
+}
+
+} // end namespace otb
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/ImageDimensionalityReductionFilter.h b/Modules/Learning/DimensionalityReduction/include/ImageDimensionalityReductionFilter.h
new file mode 100644
index 0000000000000000000000000000000000000000..008c57d2a4c65d77b762e262a2484d441d25d766
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/ImageDimensionalityReductionFilter.h
@@ -0,0 +1,145 @@
+/*=========================================================================
+
+  Program:   ORFEO Toolbox
+  Language:  C++
+  Date:      $Date$
+  Version:   $Revision$
+
+
+  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+  See OTBCopyright.txt for details.
+
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notices for more information.
+
+=========================================================================*/
+#ifndef ImageDimensionalityReduction_h
+#define ImageDimensionalityReduction_h
+
+#include "itkImageToImageFilter.h"
+//#include "DimensionalityReductionModel.h"
+#include "otbMachineLearningModel.h"
+#include "otbImage.h"
+
+namespace otb
+{
+/** \class ImageDimensionalityReductionFilter
+ *  \brief This filter performs dimensionality reduction of a VectorImage using a Model.
+ *
+ *  This filter is streamed and threaded, so huge images can be processed
+ *  while fully using several cores.
+ *
+ * \sa MachineLearningModel
+ * \ingroup Streamed
+ * \ingroup Threaded
+ *
+ * \ingroup OTBSupervised
+ */
+template <class TInputImage, class TOutputImage, class TMaskImage = TOutputImage>
+class ITK_EXPORT ImageDimensionalityReductionFilter
+  : public itk::ImageToImageFilter<TInputImage, TOutputImage>
+{
+public:
+  /** Standard typedefs */
+  typedef ImageDimensionalityReductionFilter                       Self;
+  typedef itk::ImageToImageFilter<TInputImage, TOutputImage> Superclass;
+  typedef itk::SmartPointer<Self>                            Pointer;
+  typedef itk::SmartPointer<const Self>                      ConstPointer;
+
+  /** Type macro */
+  itkNewMacro(Self);
+
+  /** Creation through object factory macro */
+  itkTypeMacro(ImageDimensionalityReductionFilter, ImageToImageFilter);
+
+  typedef TInputImage                                InputImageType;
+  typedef typename InputImageType::ConstPointer      InputImageConstPointerType;
+  typedef typename InputImageType::InternalPixelType ValueType;
+
+  typedef TMaskImage                           MaskImageType;
+  typedef typename MaskImageType::ConstPointer MaskImageConstPointerType;
+  typedef typename MaskImageType::Pointer      MaskImagePointerType;
+
+  typedef TOutputImage                         OutputImageType;
+  typedef typename OutputImageType::Pointer    OutputImagePointerType;
+  typedef typename OutputImageType::RegionType OutputImageRegionType;
+  typedef typename OutputImageType::InternalPixelType  LabelType;
+
+  typedef MachineLearningModel<itk::VariableLengthVector<ValueType>, itk::VariableLengthVector<LabelType>> ModelType;
+  typedef typename ModelType::Pointer                ModelPointerType;
+
+  typedef otb::Image<double>                    ConfidenceImageType;
+  typedef typename ConfidenceImageType::Pointer ConfidenceImagePointerType;
+
+  /** Set/Get the model */
+  itkSetObjectMacro(Model, ModelType);
+  itkGetObjectMacro(Model, ModelType);
+
+  /** Set/Get the default label */
+  itkSetMacro(DefaultLabel, LabelType);
+  itkGetMacro(DefaultLabel, LabelType);
+
+  /** Set/Get the confidence map flag */
+  itkSetMacro(UseConfidenceMap, bool);
+  itkGetMacro(UseConfidenceMap, bool);
+
+  itkSetMacro(BatchMode, bool);
+  itkGetMacro(BatchMode, bool);
+  itkBooleanMacro(BatchMode);
+  
+  /**
+   * If set, only pixels within the mask will be processed.
+   * All pixels with a value greater than 0 in the mask will be processed.
+   * \param mask The input mask.
+   */
+  void SetInputMask(const MaskImageType * mask);
+
+  /**
+   * Get the input mask.
+   * \return The mask.
+   */
+  const MaskImageType * GetInputMask(void);
+
+  /**
+   * Get the output confidence map
+   */
+  ConfidenceImageType * GetOutputConfidence(void);
+
+protected:
+  /** Constructor */
+  ImageDimensionalityReductionFilter();
+  /** Destructor */
+  ~ImageDimensionalityReductionFilter() ITK_OVERRIDE {}
+
+  /** Generate output information */
+  virtual void GenerateOutputInformation();
+
+  /** Threaded generate data */  
+  void ThreadedGenerateData(const OutputImageRegionType& outputRegionForThread, itk::ThreadIdType threadId) ITK_OVERRIDE;
+  void ClassicThreadedGenerateData(const OutputImageRegionType& outputRegionForThread, itk::ThreadIdType threadId);
+  void BatchThreadedGenerateData(const OutputImageRegionType& outputRegionForThread, itk::ThreadIdType threadId);
+  /** Before threaded generate data */
+  void BeforeThreadedGenerateData() ITK_OVERRIDE;
+  /**PrintSelf method */
+  void PrintSelf(std::ostream& os, itk::Indent indent) const ITK_OVERRIDE;
+
+private:
+  ImageDimensionalityReductionFilter(const Self &); //purposely not implemented
+  void operator =(const Self&); //purposely not implemented
+
+  /** The model used for dimensionality reduction */
+  ModelPointerType m_Model;
+  /** Default label for invalid pixels (when using a mask) */
+  LabelType m_DefaultLabel;
+  /** Flag to produce the confidence map (if the model supports it) */
+  bool m_UseConfidenceMap;
+  bool m_BatchMode;
+};
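+
+// Pipeline sketch (illustrative only, not part of this patch); the reader, writer and
+// model variables are assumed to exist and the model to be already loaded:
+//
+//   typedef otb::ImageDimensionalityReductionFilter<FloatVectorImageType,
+//                                                   FloatVectorImageType> FilterType;
+//   FilterType::Pointer filter = FilterType::New();
+//   filter->SetModel(model);
+//   filter->SetInput(reader->GetOutput());
+//   writer->SetInput(filter->GetOutput());
+//   writer->Update();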
+} // End namespace otb
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "ImageDimensionalityReductionFilter.txx"
+#endif
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/ImageDimensionalityReductionFilter.txx b/Modules/Learning/DimensionalityReduction/include/ImageDimensionalityReductionFilter.txx
new file mode 100644
index 0000000000000000000000000000000000000000..e98a0b79f753624246d732d01e60a47b43f2595a
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/ImageDimensionalityReductionFilter.txx
@@ -0,0 +1,234 @@
+/*=========================================================================
+
+  Program:   ORFEO Toolbox
+  Language:  C++
+  Date:      $Date$
+  Version:   $Revision$
+
+
+  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+  See OTBCopyright.txt for details.
+
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notices for more information.
+
+=========================================================================*/
+#ifndef ImageDimensionalityReductionFilter_txx
+#define ImageDimensionalityReductionFilter_txx
+
+#include "ImageDimensionalityReductionFilter.h"
+#include "itkImageRegionIterator.h"
+#include "itkProgressReporter.h"
+
+namespace otb
+{
+/**
+ * Constructor
+ */
+template <class TInputImage, class TOutputImage, class TMaskImage>
+ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::ImageDimensionalityReductionFilter()
+{
+  this->SetNumberOfIndexedInputs(2);
+  this->SetNumberOfRequiredInputs(1);
+
+  //m_DefaultLabel = itk::NumericTraits<LabelType>::ZeroValue();
+
+  this->SetNumberOfRequiredOutputs(2);
+  this->SetNthOutput(0,TOutputImage::New());
+  this->SetNthOutput(1,ConfidenceImageType::New());
+  m_UseConfidenceMap = false;
+  m_BatchMode = true;
+}
+
+template <class TInputImage, class TOutputImage, class TMaskImage>
+void
+ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::SetInputMask(const MaskImageType * mask)
+{
+  this->itk::ProcessObject::SetNthInput(1, const_cast<MaskImageType *>(mask));
+}
+
+template <class TInputImage, class TOutputImage, class TMaskImage>
+const typename ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::MaskImageType *
+ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::GetInputMask()
+{
+  if (this->GetNumberOfInputs() < 2)
+    {
+    return ITK_NULLPTR;
+    }
+  return static_cast<const MaskImageType *>(this->itk::ProcessObject::GetInput(1));
+}
+
+template <class TInputImage, class TOutputImage, class TMaskImage>
+typename ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::ConfidenceImageType *
+ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::GetOutputConfidence()
+{
+  if (this->GetNumberOfOutputs() < 2)
+    {
+    return ITK_NULLPTR;
+    }
+  return static_cast<ConfidenceImageType *>(this->itk::ProcessObject::GetOutput(1));
+}
+
+template <class TInputImage, class TOutputImage, class TMaskImage>
+void
+ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::BeforeThreadedGenerateData()
+{
+  if (!m_Model)
+    {
+    itkGenericExceptionMacro(<< "No model for dimensionality reduction");
+    }
+  if(m_BatchMode)
+    {
+    #ifdef _OPENMP
+    // OpenMP will take care of threading
+    this->SetNumberOfThreads(1);
+    #endif
+    }
+}
+
+template <class TInputImage, class TOutputImage, class TMaskImage>
+void
+ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::ClassicThreadedGenerateData(const OutputImageRegionType& outputRegionForThread, itk::ThreadIdType threadId)
+{
+  // Get the input pointers
+  InputImageConstPointerType inputPtr     = this->GetInput();
+  MaskImageConstPointerType  inputMaskPtr  = this->GetInputMask();
+  OutputImagePointerType     outputPtr    = this->GetOutput();
+  ConfidenceImagePointerType confidencePtr = this->GetOutputConfidence();
+
+  // Progress reporting
+  itk::ProgressReporter progress(this, threadId, outputRegionForThread.GetNumberOfPixels());
+
+  // Define iterators
+  typedef itk::ImageRegionConstIterator<InputImageType> InputIteratorType;
+  typedef itk::ImageRegionConstIterator<MaskImageType>  MaskIteratorType;
+  typedef itk::ImageRegionIterator<OutputImageType>     OutputIteratorType;
+  typedef itk::ImageRegionIterator<ConfidenceImageType> ConfidenceMapIteratorType;
+
+  InputIteratorType inIt(inputPtr, outputRegionForThread);
+  OutputIteratorType outIt(outputPtr, outputRegionForThread);
+
+  // Walk the part of the image
+  for (inIt.GoToBegin(), outIt.GoToBegin(); !inIt.IsAtEnd() && !outIt.IsAtEnd(); ++inIt, ++outIt)
+    {
+    // Reduce the dimensionality of the current pixel
+    outIt.Set(m_Model->Predict(inIt.Get()));
+    progress.CompletedPixel();
+    }
+
+}
+
+template <class TInputImage, class TOutputImage, class TMaskImage>
+void ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>::GenerateOutputInformation()
+{
+	Superclass::GenerateOutputInformation();
+    this->GetOutput()->SetNumberOfComponentsPerPixel( m_Model->GetDimension() );
+}
+
+
+
+template <class TInputImage, class TOutputImage, class TMaskImage>
+void
+ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::BatchThreadedGenerateData(const OutputImageRegionType& outputRegionForThread, itk::ThreadIdType threadId)
+{
+
+  // Get the input pointers
+  InputImageConstPointerType inputPtr     = this->GetInput();
+  MaskImageConstPointerType  inputMaskPtr  = this->GetInputMask();
+  OutputImagePointerType     outputPtr    = this->GetOutput();
+  ConfidenceImagePointerType confidencePtr = this->GetOutputConfidence();
+  
+  // Progress reporting
+  itk::ProgressReporter progress(this, threadId, outputRegionForThread.GetNumberOfPixels());
+
+  // Define iterators
+  typedef itk::ImageRegionConstIterator<InputImageType> InputIteratorType;
+  typedef itk::ImageRegionConstIterator<MaskImageType>  MaskIteratorType;
+  typedef itk::ImageRegionIterator<OutputImageType>     OutputIteratorType;
+  typedef itk::ImageRegionIterator<ConfidenceImageType> ConfidenceMapIteratorType;
+
+  InputIteratorType inIt(inputPtr, outputRegionForThread);
+  OutputIteratorType outIt(outputPtr, outputRegionForThread);
+
+  typedef typename ModelType::InputSampleType      InputSampleType;
+  typedef typename ModelType::InputListSampleType  InputListSampleType;
+  typedef typename ModelType::TargetValueType      TargetValueType;
+  typedef typename ModelType::TargetListSampleType TargetListSampleType;
+  
+  typename InputListSampleType::Pointer samples = InputListSampleType::New();
+  unsigned int num_features = inputPtr->GetNumberOfComponentsPerPixel();
+  samples->SetMeasurementVectorSize(num_features);
+  InputSampleType sample(num_features);
+  // Fill the samples
+  
+  for (inIt.GoToBegin(); !inIt.IsAtEnd(); ++inIt)
+    {
+    
+     typename InputImageType::PixelType pix = inIt.Get();
+     for(size_t feat=0; feat<num_features; ++feat)
+        {
+        sample[feat]=pix[feat];
+        }
+      samples->PushBack(sample);
+      
+    }
+  //Make the batch prediction
+  typename TargetListSampleType::Pointer labels;
+ 
+  // This call is threadsafe
+  labels = m_Model->PredictBatch(samples);
+
+  // Set the output values
+ 
+  typename TargetListSampleType::ConstIterator labIt = labels->Begin();
+ 
+  for (outIt.GoToBegin(); !outIt.IsAtEnd(); ++outIt)
+    {
+
+	itk::VariableLengthVector<TargetValueType> labelValue;
+  
+    labelValue = labIt.GetMeasurementVector();
+    ++labIt;    
+    outIt.Set(labelValue);
+    progress.CompletedPixel();
+    }
+}
+template <class TInputImage, class TOutputImage, class TMaskImage>
+void
+ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::ThreadedGenerateData(const OutputImageRegionType& outputRegionForThread, itk::ThreadIdType threadId)
+{
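+  // In batch mode the whole region is first converted to a ListSample and handed to
+  // PredictBatch() in a single call; otherwise pixels are predicted one by one.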
+  if(m_BatchMode)
+    {
+    this->BatchThreadedGenerateData(outputRegionForThread, threadId);
+    }
+  else
+    {
+    this->ClassicThreadedGenerateData(outputRegionForThread, threadId);
+    }
+
+}
+/**
+ * PrintSelf Method
+ */
+template <class TInputImage, class TOutputImage, class TMaskImage>
+void
+ImageDimensionalityReductionFilter<TInputImage, TOutputImage, TMaskImage>
+::PrintSelf(std::ostream& os, itk::Indent indent) const
+{
+  Superclass::PrintSelf(os, indent);
+}
+} // End namespace otb
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/PCAModel.h b/Modules/Learning/DimensionalityReduction/include/PCAModel.h
new file mode 100644
index 0000000000000000000000000000000000000000..82ea0f8443a4caadc49bb6e66e91928a8cbf6a7b
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/PCAModel.h
@@ -0,0 +1,83 @@
+#ifndef PCAModel_h
+#define PCAModel_h
+
+#include "otbMachineLearningModelTraits.h"
+#include "otbMachineLearningModel.h"
+
+#include <shark/Algorithms/Trainers/PCA.h>
+
+namespace otb
+{
+template <class TInputValue>
+class ITK_EXPORT PCAModel: public  MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>>    
+{
+
+public:
+	
+	typedef PCAModel Self;
+	typedef MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>> Superclass;
+	typedef itk::SmartPointer<Self> Pointer;
+	typedef itk::SmartPointer<const Self> ConstPointer;
+
+	typedef typename Superclass::InputValueType 			InputValueType;
+	typedef typename Superclass::InputSampleType 			InputSampleType;
+	typedef typename Superclass::InputListSampleType 		InputListSampleType;
+	typedef typename InputListSampleType::Pointer 			ListSamplePointerType;
+	typedef typename Superclass::TargetValueType 			TargetValueType;
+	typedef typename Superclass::TargetSampleType 			TargetSampleType;
+	typedef typename Superclass::TargetListSampleType 		TargetListSampleType;
+
+	/// Confidence map related typedefs
+	
+	typedef typename Superclass::ConfidenceValueType  				ConfidenceValueType;
+	typedef typename Superclass::ConfidenceSampleType 				ConfidenceSampleType;
+	typedef typename Superclass::ConfidenceListSampleType      		ConfidenceListSampleType;
+
+
+	itkNewMacro(Self);
+	itkTypeMacro(PCAModel, DimensionalityReductionModel);
+/*
+	unsigned int GetDimension() {return m_Dimension;}; 
+	itkSetMacro(Dimension,unsigned int);
+	*/
+	itkSetMacro(Do_resize_flag,bool);
+	
+	itkSetMacro(WriteEigenvectors, bool);
+	itkGetMacro(WriteEigenvectors, bool);
+	
+	bool CanReadFile(const std::string & filename);
+	bool CanWriteFile(const std::string & filename);
+
+	void Save(const std::string & filename, const std::string & name="")  ITK_OVERRIDE;
+	void Load(const std::string & filename, const std::string & name="")  ITK_OVERRIDE;
+
+	void Train() ITK_OVERRIDE;
+	//void Dimensionality_reduction()  {}; // Dimensionality reduction is done by DoPredict
+	 
+
+protected:
+	PCAModel();	
+	~PCAModel() ITK_OVERRIDE;
+ 
+	virtual TargetSampleType DoPredict(const InputSampleType& input, ConfidenceValueType * quality = ITK_NULLPTR) const;
+
+	virtual void DoPredictBatch(const InputListSampleType *, const unsigned int & startIndex, const unsigned int & size, TargetListSampleType *, ConfidenceListSampleType * quality = ITK_NULLPTR) const ITK_OVERRIDE;
+  
+private:
+	shark::LinearModel<> m_encoder;
+	shark::LinearModel<> m_decoder;
+	shark::PCA m_pca;
+	//unsigned int m_Dimension;
+	bool m_Do_resize_flag;
+	bool m_WriteEigenvectors;
+};
+} // end namespace otb
+
+
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "PCAModel.txx"
+#endif
+
+
+#endif
+
diff --git a/Modules/Learning/DimensionalityReduction/include/PCAModel.txx b/Modules/Learning/DimensionalityReduction/include/PCAModel.txx
new file mode 100644
index 0000000000000000000000000000000000000000..364dd8def35fa51373f3b0cf30e927622044d736
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/PCAModel.txx
@@ -0,0 +1,188 @@
+
+#ifndef PCAModel_txx
+#define PCAModel_txx
+
+#include <fstream>
+#include <shark/Data/Dataset.h>
+#include "itkMacro.h"
+#include "otbSharkUtils.h"
+//include train function
+#include <shark/ObjectiveFunctions/ErrorFunction.h>
+#include <shark/Algorithms/GradientDescent/Rprop.h>// the RProp optimization algorithm
+#include <shark/ObjectiveFunctions/Loss/SquaredLoss.h> // squared loss used for regression
+#include <shark/ObjectiveFunctions/Regularizer.h> //L2 regulariziation
+
+#include <shark/ObjectiveFunctions/ErrorFunction.h>
+
+namespace otb
+{
+
+
+template <class TInputValue>
+PCAModel<TInputValue>::PCAModel()
+{
+	this->m_IsDoPredictBatchMultiThreaded = true;
+	this->m_Dimension = 0;
+}
+
+
+template <class TInputValue>
+PCAModel<TInputValue>::~PCAModel()
+{
+}
+
+
+template <class TInputValue>
+void PCAModel<TInputValue>::Train()
+{
+	
+	std::vector<shark::RealVector> features;
+	
+	Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
+	
+	shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
+	m_pca.setData(inputSamples);
+	m_pca.encoder(m_encoder, this->m_Dimension);
+	m_pca.decoder(m_decoder, this->m_Dimension);
+	
+}
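+
+// Training sketch (illustrative only, not part of this patch). The number of retained
+// components is assumed to be set through the model base class used in this branch
+// (m_Dimension); the SetDimension() accessor below is hypothetical.
+//
+//   PCAModel<float>::Pointer pca = PCAModel<float>::New();
+//   pca->SetInputListSample(samples);   // samples: pointer to an itk::Statistics::ListSample
+//   pca->SetDimension(3);               // hypothetical accessor for m_Dimension
+//   pca->Train();
+//   pca->Save("pca_model.txt");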
+
+
+template <class TInputValue>
+bool PCAModel<TInputValue>::CanReadFile(const std::string & filename)
+{
+	try
+	{
+		this->Load(filename);
+		m_encoder.name();
+	}
+	catch(...)
+	{
+	return false;
+	}
+	return true;
+}
+
+
+template <class TInputValue>
+bool PCAModel<TInputValue>::CanWriteFile(const std::string & filename)
+{
+	return true;
+}
+
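+// File format note: Save() writes the line "pca" followed by a boost polymorphic text
+// archive of the encoder; Load() checks this first line before deserializing.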
+template <class TInputValue>
+void PCAModel<TInputValue>::Save(const std::string & filename, const std::string & name)
+{
+	std::ofstream ofs(filename);
+	//ofs << m_encoder.name() << std::endl; //first line
+	ofs << "pca" << std::endl; //first line
+	boost::archive::polymorphic_text_oarchive oa(ofs);
+	m_encoder.write(oa);
+	ofs.close();
+	
+	if (this->m_WriteEigenvectors == true)     // output the eigenvectors in a txt file
+	{
+		std::ofstream otxt(filename+".txt");
+		
+		otxt << "Eigenvectors : " << m_pca.eigenvectors() << std::endl;
+		otxt << "Eigenvalues : " << m_pca.eigenvalues() << std::endl;
+		
+		std::vector<shark::RealVector> features;
+	
+		shark::SquaredLoss<shark::RealVector> loss;
+		Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
+		shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
+		otxt << "Reconstruction error : " << loss.eval(inputSamples,m_decoder(m_encoder(inputSamples))) << std::endl;
+		std::cout << "Reconstruction error : " << loss.eval(inputSamples,m_decoder(m_encoder(inputSamples))) << std::endl;
+		otxt.close();
+	}
+}
+
+template <class TInputValue>
+void PCAModel<TInputValue>::Load(const std::string & filename, const std::string & name)
+{
+	std::ifstream ifs(filename);
+	char encoder[256];
+	ifs.getline(encoder,256); 
+	std::string encoderstr(encoder);
+	
+	//if (encoderstr != m_encoder.name()){
+	if (encoderstr != "pca"){
+		itkExceptionMacro(<< "Error opening " << filename.c_str() );
+    }
+	boost::archive::polymorphic_text_iarchive ia(ifs);
+	m_encoder.read(ia);
+	ifs.close();
+	if (this->m_Dimension ==0)
+	{
+		this->m_Dimension = m_encoder.outputSize();
+	}
+	
+	
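+	// Truncate the stored encoder to the requested number of components, keeping only
+	// the first m_Dimension rows of the eigenvector matrix.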
+	auto eigenvectors = m_encoder.matrix();
+	eigenvectors.resize(this->m_Dimension,m_encoder.inputSize());
+	
+	m_encoder.setStructure(eigenvectors, m_encoder.offset() );
+
+	
+	
+}
+
+
+template <class TInputValue>
+typename PCAModel<TInputValue>::TargetSampleType
+PCAModel<TInputValue>::DoPredict(const InputSampleType & value, ConfidenceValueType * quality) const
+{  
+	shark::RealVector samples(value.Size());
+	for(size_t i = 0; i < value.Size();i++)
+    {
+		samples[i]=value[i];
+    }
+    
+    std::vector<shark::RealVector> features;
+    features.push_back(samples);
+   
+    shark::Data<shark::RealVector> data = shark::createDataFromRange(features);
+     
+	data = m_encoder(data);
+    TargetSampleType target;
+    target.SetSize(this->m_Dimension);
+	
+	for(unsigned int a = 0; a < this->m_Dimension; ++a){
+		target[a]=data.element(0)[a];
+	}
+	return target;
+}
+
+
+template <class TInputValue>
+void PCAModel<TInputValue>
+::DoPredictBatch(const InputListSampleType *input, const unsigned int & startIndex, const unsigned int & size, TargetListSampleType * targets, ConfidenceListSampleType * quality) const
+{
+	
+	std::vector<shark::RealVector> features;
+	Shark::ListSampleRangeToSharkVector(input, features,startIndex,size);
+	shark::Data<shark::RealVector> data = shark::createDataFromRange(features);
+	TargetSampleType target;
+	data = m_encoder(data);
+	unsigned int id = startIndex;
+	target.SetSize(this->m_Dimension);
+	for(const auto& p : data.elements())
+	{
+		for(unsigned int a = 0; a < this->m_Dimension; ++a)
+		{
+			target[a] = p[a];
+		}
+		targets->SetMeasurementVector(id, target);
+		++id;
+    }
+
+}
+
+
+} // namespace otb
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/PCAModelFactory.h b/Modules/Learning/DimensionalityReduction/include/PCAModelFactory.h
new file mode 100644
index 0000000000000000000000000000000000000000..56c0b88c6c3d85414d8f0d95b93f64047d2fbd06
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/PCAModelFactory.h
@@ -0,0 +1,59 @@
+#ifndef PCAModelFactory_h
+#define PCAModelFactory_h
+
+
+#include "itkObjectFactoryBase.h"
+#include "itkImageIOBase.h"
+
+namespace otb
+{
+	
+template <class TInputValue, class TTargetValue>
+class ITK_EXPORT PCAModelFactory : public itk::ObjectFactoryBase
+{
+public:
+  /** Standard class typedefs. */
+  typedef PCAModelFactory   Self;
+  typedef itk::ObjectFactoryBase        Superclass;
+  typedef itk::SmartPointer<Self>       Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Class methods used to interface with the registered factories. */
+  const char* GetITKSourceVersion(void) const ITK_OVERRIDE;
+  const char* GetDescription(void) const ITK_OVERRIDE;
+
+  /** Method for class instantiation. */
+  itkFactorylessNewMacro(Self);
+
+  /** Run-time type information (and related methods). */
+  itkTypeMacro(PCAModelFactory, itk::ObjectFactoryBase);
+
+  /** Register one factory of this type  */
+  static void RegisterOneFactory(void)
+  {
+    Pointer PCAFactory = PCAModelFactory::New();
+    itk::ObjectFactoryBase::RegisterFactory(PCAFactory);
+  }
+
+protected:
+  PCAModelFactory();
+  ~PCAModelFactory() ITK_OVERRIDE;
+
+private:
+  PCAModelFactory(const Self &); //purposely not implemented
+  void operator =(const Self&); //purposely not implemented
+
+};
+
+
+
+} //namespace otb
+
+
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "PCAModelFactory.txx"
+#endif
+
+#endif
+
+
diff --git a/Modules/Learning/DimensionalityReduction/include/PCAModelFactory.txx b/Modules/Learning/DimensionalityReduction/include/PCAModelFactory.txx
new file mode 100644
index 0000000000000000000000000000000000000000..bfaa4f6f624b12d8351defb1bc52ad9a4d37253e
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/PCAModelFactory.txx
@@ -0,0 +1,65 @@
+/*=========================================================================
+
+  Program:   ORFEO Toolbox
+  Language:  C++
+  Date:      $Date$
+  Version:   $Revision$
+
+
+  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+  See OTBCopyright.txt for details.
+
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notices for more information.
+
+=========================================================================*/
+#ifndef PCAModelFactory_txx
+#define PCAModelFactory_txx
+
+
+#include "PCAModelFactory.h"
+
+#include "itkCreateObjectFunction.h"
+#include "PCAModel.h"
+//#include <shark/Algorithms/Trainers/PCA.h>
+#include "itkVersion.h"
+
+namespace otb
+{
+template <class TInputValue, class TOutputValue>
+PCAModelFactory<TInputValue,TOutputValue>::PCAModelFactory()
+{
+
+  std::string classOverride = std::string("DimensionalityReductionModel");
+  std::string subclass = std::string("PCAModel");
+
+  this->RegisterOverride(classOverride.c_str(),
+                         subclass.c_str(),
+                         "Shark PCA ML Model",
+                         1,
+                         itk::CreateObjectFunction<PCAModel<TInputValue>>::New());
+}
+
+template <class TInputValue, class TOutputValue>
+PCAModelFactory<TInputValue,TOutputValue>::~PCAModelFactory()
+{
+}
+
+template <class TInputValue, class TOutputValue>
+const char* PCAModelFactory<TInputValue,TOutputValue>::GetITKSourceVersion(void) const
+{
+  return ITK_SOURCE_VERSION;
+}
+
+template <class TInputValue, class TOutputValue>
+const char* PCAModelFactory<TInputValue,TOutputValue>::GetDescription() const
+{
+  return "PCA model factory";
+}
+
+} // end namespace otb
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/SOMModel.h b/Modules/Learning/DimensionalityReduction/include/SOMModel.h
new file mode 100644
index 0000000000000000000000000000000000000000..c9553cd9c37e76d2a9c271c72c12caa4b0bd60b3
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/SOMModel.h
@@ -0,0 +1,140 @@
+#ifndef SOMModel_h
+#define SOMModel_h
+
+//#include "DimensionalityReductionModel.h"
+#include "otbSOMMap.h"
+
+#include "otbSOM.h"
+
+#include "itkEuclideanDistanceMetric.h" // the distance function
+
+#include "otbCzihoSOMLearningBehaviorFunctor.h"
+#include "otbCzihoSOMNeighborhoodBehaviorFunctor.h"
+
+#include "otbMachineLearningModelTraits.h"
+#include "otbMachineLearningModel.h"
+
+namespace otb
+{
+template <class TInputValue, unsigned int MapDimension>
+class ITK_EXPORT SOMModel: public  MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>>   
+{
+
+public:
+	
+	typedef SOMModel Self;
+	typedef MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>> Superclass;
+	typedef itk::SmartPointer<Self> Pointer;
+	typedef itk::SmartPointer<const Self> ConstPointer;
+
+	typedef typename Superclass::InputValueType 			InputValueType;
+	typedef typename Superclass::InputSampleType 			InputSampleType;
+	typedef typename Superclass::InputListSampleType 		InputListSampleType;
+	typedef typename InputListSampleType::Pointer 			ListSamplePointerType;
+	typedef typename Superclass::TargetValueType 			TargetValueType;
+	typedef typename Superclass::TargetSampleType 			TargetSampleType;
+	typedef typename Superclass::TargetListSampleType 		TargetListSampleType;
+
+	/// Confidence map related typedefs
+	
+	typedef typename Superclass::ConfidenceValueType  				ConfidenceValueType;
+	typedef typename Superclass::ConfidenceSampleType 				ConfidenceSampleType;
+	typedef typename Superclass::ConfidenceListSampleType      		ConfidenceListSampleType;
+
+
+
+	typedef SOMMap<itk::VariableLengthVector<TInputValue>,itk::Statistics::EuclideanDistanceMetric<itk::VariableLengthVector<TInputValue>>, MapDimension> MapType;
+	typedef typename MapType::SizeType       				SizeType;
+	typedef typename MapType::SpacingType       			SpacingType;
+	//typedef otb::SOM<InputListSampleType, MapType> 			EstimatorType;
+	typedef otb::SOM<InputListSampleType, MapType> 			EstimatorType;
+
+	typedef Functor::CzihoSOMLearningBehaviorFunctor    	SOMLearningBehaviorFunctorType;
+	typedef Functor::CzihoSOMNeighborhoodBehaviorFunctor 	SOMNeighborhoodBehaviorFunctorType;
+
+	itkNewMacro(Self);
+	itkTypeMacro(SOMModel, DimensionalityReductionModel);
+	 
+	/** Accessors */
+	itkSetMacro(NumberOfIterations, unsigned int);
+	itkGetMacro(NumberOfIterations, unsigned int);
+    itkSetMacro(BetaInit, double);
+	itkGetMacro(BetaInit, double);
+	itkSetMacro(WriteMap, bool);
+	itkGetMacro(WriteMap, bool);
+	itkSetMacro(BetaEnd, double);
+	itkGetMacro(BetaEnd, double);
+	itkSetMacro(MinWeight, InputValueType);
+	itkGetMacro(MinWeight, InputValueType);
+	itkSetMacro(MaxWeight, InputValueType);
+	itkGetMacro(MaxWeight, InputValueType);
+	itkSetMacro(MapSize, SizeType);
+	itkGetMacro(MapSize, SizeType);
+	itkSetMacro(NeighborhoodSizeInit, SizeType);
+	itkGetMacro(NeighborhoodSizeInit, SizeType);
+	itkSetMacro(RandomInit, bool);
+	itkGetMacro(RandomInit, bool);
+	itkSetMacro(Seed, unsigned int);
+	itkGetMacro(Seed, unsigned int);
+	itkGetObjectMacro(ListSample, InputListSampleType);
+	itkSetObjectMacro(ListSample, InputListSampleType);
+
+	bool CanReadFile(const std::string & filename);
+	bool CanWriteFile(const std::string & filename);
+
+	void Save(const std::string & filename, const std::string & name="") ;
+	void Load(const std::string & filename, const std::string & name="") ;
+
+	void Train() ITK_OVERRIDE;
+	//void Dimensionality_reduction()  {}; // Dimensionality reduction is done by DoPredict
+	 
+	//unsigned int GetDimension() { return MapType::ImageDimension;};
+protected:
+	SOMModel();	
+	~SOMModel() ITK_OVERRIDE;
+ 
+private:
+	typename MapType::Pointer m_SOMMap;
+	
+	virtual TargetSampleType DoPredict(const InputSampleType& input, ConfidenceValueType * quality = ITK_NULLPTR) const;
+
+	/** Map Parameters used for training */
+  
+	SizeType m_MapSize;
+	/** Number of iterations */
+	unsigned int m_NumberOfIterations;
+	/** Initial learning coefficient */
+	double m_BetaInit;
+	/** Final learning coefficient */
+	double m_BetaEnd;
+	/** Initial neighborhood size */
+	SizeType m_NeighborhoodSizeInit;
+	/** Minimum initial neuron weights */
+	InputValueType m_MinWeight;
+	/** Maximum initial neuron weights */
+	InputValueType m_MaxWeight;
+	/** Random initialization bool */
+	bool m_RandomInit;
+	/** Seed for random initialization */
+	unsigned int m_Seed;
+	/** The input list sample */
+	ListSamplePointerType m_ListSample;
+	/** Behavior of the learning weighting (linked to the beta coefficient) */
+	SOMLearningBehaviorFunctorType m_BetaFunctor;
+	/** Behavior of the Neighborhood extent */
+	SOMNeighborhoodBehaviorFunctorType m_NeighborhoodSizeFunctor;
+	/** Write the SOM Map vectors in a txt file */
+	bool m_WriteMap;
+};
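+
+// Training sketch (illustrative only, not part of this patch); parameter values are
+// arbitrary examples:
+//
+//   typedef otb::SOMModel<float, 2> SOM2DModelType;
+//   SOM2DModelType::Pointer som = SOM2DModelType::New();
+//   som->SetListSample(samples);
+//   SOM2DModelType::SizeType size;
+//   size.Fill(10);
+//   som->SetMapSize(size);
+//   som->SetNeighborhoodSizeInit(size);
+//   som->SetNumberOfIterations(10);
+//   som->SetBetaInit(1.0);
+//   som->SetBetaEnd(0.1);
+//   som->SetMaxWeight(1.0);
+//   som->Train();
+//   som->Save("som_model.bin");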
+
+
+} // end namespace otb
+
+
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "SOMModel.txx"
+#endif
+
+
+#endif
+
diff --git a/Modules/Learning/DimensionalityReduction/include/SOMModel.txx b/Modules/Learning/DimensionalityReduction/include/SOMModel.txx
new file mode 100644
index 0000000000000000000000000000000000000000..6195b9f96189f1a781a6db581da79aa131f502e0
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/SOMModel.txx
@@ -0,0 +1,208 @@
+#ifndef SOMModel_txx
+#define SOMModel_txx
+
+#include "otbImageFileReader.h"
+#include "otbImageFileWriter.h"
+
+#include "itkMacro.h"
+
+
+// test text file
+#include "itkImageRegionIterator.h"
+#include "itkImageRegionConstIterator.h"
+#include <fstream> 
+
+
+#include "itkImage.h"
+
+namespace otb
+{
+
+
+template <class TInputValue, unsigned int MapDimension>
+SOMModel<TInputValue,  MapDimension>::SOMModel()
+{
+	this->m_Dimension = MapType::ImageDimension;
+}
+
+
+template <class TInputValue, unsigned int MapDimension>
+SOMModel<TInputValue, MapDimension>::~SOMModel()
+{
+}
+
+
+template <class TInputValue, unsigned int MapDimension>
+void SOMModel<TInputValue,  MapDimension>::Train()
+{
+	
+    typename EstimatorType::Pointer estimator = EstimatorType::New();
+	
+    estimator->SetListSample(m_ListSample);
+    estimator->SetMapSize(m_MapSize);
+    estimator->SetNeighborhoodSizeInit(m_NeighborhoodSizeInit);
+    estimator->SetNumberOfIterations(m_NumberOfIterations);
+    estimator->SetBetaInit(m_BetaInit);
+    estimator->SetBetaEnd(m_BetaEnd);
+    estimator->SetMaxWeight(m_MaxWeight);
+    //AddProcess(estimator,"Learning");
+    estimator->Update();
+    m_SOMMap = estimator->GetOutput();
+   }
+
+
+
+template <class TInputValue, unsigned int MapDimension>
+bool SOMModel<TInputValue, MapDimension>::CanReadFile(const std::string & filename)
+{
+	try
+	{
+		this->Load(filename);
+	}
+	catch(...)
+	{
+	return false;
+	}
+	return true;
+}
+
+
+template <class TInputValue, unsigned int MapDimension>
+bool SOMModel<TInputValue, MapDimension>::CanWriteFile(const std::string & filename)
+{
+	return true;
+}
+
+template<typename T>
+std::ostream& binary_write(std::ostream& stream, const T& value){
+    return stream.write(reinterpret_cast<const char*>(&value), sizeof(T));
+}
+
+
+inline std::ostream& binary_write_string(std::ofstream& stream, const std::string& value){
+    return stream.write(value.c_str(), value.length());
+}
+
+template<typename T>
+std::istream & binary_read(std::istream& stream, T& value){
+    return stream.read(reinterpret_cast<char*>(&value), sizeof(T));
+}
+
+
+
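+// Binary layout shared by Save() and Load() below: the literal "som", the map
+// dimension, the map size along each dimension, the number of components per neuron,
+// then every neuron vector written component by component.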
+template <class TInputValue, unsigned int MapDimension>
+void SOMModel<TInputValue, MapDimension>::Save(const std::string & filename, const std::string & name)
+{
+	itk::ImageRegionConstIterator<MapType> inputIterator(m_SOMMap,m_SOMMap->GetLargestPossibleRegion());
+	inputIterator.GoToBegin();
+	std::ofstream ofs(filename, std::ios::binary);
+	binary_write_string(ofs,"som"); 
+	binary_write(ofs,static_cast<unsigned int>(MapDimension));
+	SizeType size = m_SOMMap->GetLargestPossibleRegion().GetSize() ;
+	for (size_t i=0;i<MapDimension;i++){
+		binary_write(ofs,size[i]);
+	}
+	  
+	binary_write(ofs,inputIterator.Get().GetNumberOfElements());
+	while(!inputIterator.IsAtEnd()){
+		InputSampleType vect = inputIterator.Get();
+		for (size_t i=0;i<vect.GetNumberOfElements();i++){
+			binary_write(ofs,vect[i]);
+		}	
+	++inputIterator;
+	}
+	ofs.close();
+	
+	if (this->m_WriteMap == true)     // output the map vectors in a txt file
+	{
+		std::ofstream otxt(filename+".txt");
+		inputIterator.GoToBegin();
+		while(!inputIterator.IsAtEnd())
+		{
+			InputSampleType vect = inputIterator.Get();
+			for (size_t i=0;i<vect.GetNumberOfElements();i++)
+			{
+				
+				otxt << vect[i] << " ";
+			}	
+			otxt << std::endl;
+			++inputIterator;
+		}
+		otxt.close();
+	}
+}
+
+template <class TInputValue, unsigned int MapDimension>
+void SOMModel<TInputValue, MapDimension>::Load(const std::string & filename, const std::string & name)
+{
+	
+	std::ifstream ifs(filename, std::ios::binary);
+	
+	/**  Read the model key (should be som) */
+	char s[]="   ";
+	for (int i=0; i<3; i++){	
+		binary_read(ifs,s[i]);
+	}
+	std::string modelType(s);
+	/** Read the dimension of the map (should be equal to MapDimension) */
+	
+	unsigned int dimension;
+	binary_read(ifs,dimension);
+	if (modelType != "som" || dimension != MapDimension){
+		itkExceptionMacro(<< "Error opening " << filename.c_str() );
+    }
+    
+	SizeType size;
+	itk::Index< MapDimension > index;
+	for (int i=0 ; i<MapDimension; i++)
+	{
+		binary_read(ifs,size[i]);
+		index[i]=0;
+	}
+	unsigned int numberOfElements;
+	binary_read(ifs,numberOfElements);
+	m_SOMMap = MapType::New();
+	typename MapType::RegionType region;
+	region.SetSize( size );
+	m_SOMMap->SetNumberOfComponentsPerPixel(numberOfElements);
+	region.SetIndex( index );
+	m_SOMMap->SetRegions( region );
+	m_SOMMap->Allocate();
+
+	itk::ImageRegionIterator<MapType> outputIterator(m_SOMMap,region);
+    outputIterator.GoToBegin();
+	while(!outputIterator.IsAtEnd()){
+		InputSampleType vect(numberOfElements);
+		for (unsigned int i=0 ; i<numberOfElements; i++)
+		{
+			float v;    // the values are stored as float; InputValueType may differ between training and prediction
+			binary_read(ifs,v);
+			vect[i] = static_cast<double>(v);
+		}
+		outputIterator.Set(vect);
+		++outputIterator;
+	}
+	ifs.close();
+	
+	this->m_Dimension = MapType::ImageDimension;
+}
+
+
+template <class TInputValue, unsigned int MapDimension>
+typename SOMModel<TInputValue, MapDimension>::TargetSampleType
+SOMModel<TInputValue, MapDimension>::DoPredict(const InputSampleType & value, ConfidenceValueType * quality) const
+{ 
+    TargetSampleType target;
+    target.SetSize(this->m_Dimension);
+	
+    auto winner = m_SOMMap->GetWinner(value);
+    for (unsigned int i = 0; i < this->m_Dimension; i++) {
+        target[i] = winner.GetElement(i);
+    }
+
+	return target;
+}
+
+} // namespace otb
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/SOMModelFactory.h b/Modules/Learning/DimensionalityReduction/include/SOMModelFactory.h
new file mode 100644
index 0000000000000000000000000000000000000000..c078b07106f52a269aa42ec4e9404675321da65a
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/SOMModelFactory.h
@@ -0,0 +1,59 @@
+#ifndef SOMModelFactory_h
+#define SOMModelFactory_h
+
+
+#include "itkObjectFactoryBase.h"
+#include "itkImageIOBase.h"
+
+namespace otb
+{
+	
+template <class TInputValue, class TTargetValue, unsigned int MapDimension>
+class ITK_EXPORT SOMModelFactory : public itk::ObjectFactoryBase
+{
+public:
+  /** Standard class typedefs. */
+  typedef SOMModelFactory   Self;
+  typedef itk::ObjectFactoryBase        Superclass;
+  typedef itk::SmartPointer<Self>       Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Class methods used to interface with the registered factories. */
+  const char* GetITKSourceVersion(void) const ITK_OVERRIDE;
+  const char* GetDescription(void) const ITK_OVERRIDE;
+
+  /** Method for class instantiation. */
+  itkFactorylessNewMacro(Self);
+
+  /** Run-time type information (and related methods). */
+  itkTypeMacro(SOMModelFactory, itk::ObjectFactoryBase);
+
+  /** Register one factory of this type  */
+  static void RegisterOneFactory(void)
+  {
+    Pointer SOMFactory = SOMModelFactory::New();
+    itk::ObjectFactoryBase::RegisterFactory(SOMFactory);
+  }
+
+protected:
+  SOMModelFactory();
+  ~SOMModelFactory() ITK_OVERRIDE;
+
+private:
+  SOMModelFactory(const Self &); //purposely not implemented
+  void operator =(const Self&); //purposely not implemented
+
+};
+
+
+
+} //namespace otb
+
+
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "SOMModelFactory.txx"
+#endif
+
+#endif
+
+
diff --git a/Modules/Learning/DimensionalityReduction/include/SOMModelFactory.txx b/Modules/Learning/DimensionalityReduction/include/SOMModelFactory.txx
new file mode 100644
index 0000000000000000000000000000000000000000..396a5f0ac9361aa92897b2bfb6f18f0f3a6a0630
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/SOMModelFactory.txx
@@ -0,0 +1,65 @@
+/*=========================================================================
+
+  Program:   ORFEO Toolbox
+  Language:  C++
+  Date:      $Date$
+  Version:   $Revision$
+
+
+  Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+  See OTBCopyright.txt for details.
+
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notices for more information.
+
+=========================================================================*/
+#ifndef SOMModelFactory_txx
+#define SOMModelFactory_txx
+
+
+#include "SOMModelFactory.h"
+
+#include "itkCreateObjectFunction.h"
+#include "SOMModel.h"
+//#include <shark/Algorithms/Trainers/PCA.h>
+#include "itkVersion.h"
+
+namespace otb
+{
+template <class TInputValue, class TOutputValue, unsigned int MapDimension>
+SOMModelFactory<TInputValue,TOutputValue,MapDimension>::SOMModelFactory()
+{
+
+  std::string classOverride = std::string("DimensionalityReductionModel");
+  std::string subclass = std::string("SOMModel");
+
+  this->RegisterOverride(classOverride.c_str(),
+                         subclass.c_str(),
+                         "SOM DR Model",
+                         1,
+                         itk::CreateObjectFunction<SOMModel<TInputValue, MapDimension> >::New());
+}
+
+template <class TInputValue, class TOutputValue, unsigned int MapDimension>
+SOMModelFactory<TInputValue,TOutputValue,MapDimension>::~SOMModelFactory()
+{
+}
+
+template <class TInputValue, class TOutputValue, unsigned int MapDimension>
+const char* SOMModelFactory<TInputValue,TOutputValue,MapDimension>::GetITKSourceVersion(void) const
+{
+  return ITK_SOURCE_VERSION;
+}
+
+template <class TInputValue, class TOutputValue, unsigned int MapDimension>
+const char* SOMModelFactory<TInputValue,TOutputValue,MapDimension>::GetDescription() const
+{
+  return "SOM model factory";
+}
+
+} // end namespace otb
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/cbLearningApplicationBaseDR.h b/Modules/Learning/DimensionalityReduction/include/cbLearningApplicationBaseDR.h
new file mode 100644
index 0000000000000000000000000000000000000000..281cc1866bbcc6eb3799c8aa9b8b4abbc2b90a82
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/cbLearningApplicationBaseDR.h
@@ -0,0 +1,171 @@
+#ifndef cbLearningApplicationBaseDR_h
+#define cbLearningApplicationBaseDR_h
+
+#include "otbConfigure.h"
+
+#include "otbWrapperApplication.h"
+
+#include <iostream>
+
+// ListSample
+#include "itkListSample.h"
+#include "itkVariableLengthVector.h"
+
+//Estimator
+#include "DimensionalityReductionModelFactory.h"
+
+#include "SOMModel.h"
+
+#ifdef OTB_USE_SHARK
+#include "AutoencoderModel.h"
+#include "PCAModel.h"
+#endif
+
+namespace otb
+{
+namespace Wrapper
+{
+
+/** \class cbLearningApplicationBaseDR
+ *  \brief cbLearningApplicationBaseDR is the base class for applications that
+ *         use a dimensionality reduction model.
+ *
+ * This base class offers a DoInit() method to initialize all the parameters
+ * related to dimensionality reduction models. They are all grouped under the
+ * choice parameter named "model". The class also offers generic Train() and
+ * Reduce() methods. The classes derived from cbLearningApplicationBaseDR only
+ * need these three methods to handle the dimensionality reduction model.
+ *
+ * Several models are available: SOM maps of dimension 2 to 5 and, when Shark
+ * is enabled (OTB_USE_SHARK), autoencoders and PCA. They all have different
+ * parameters. The purpose of this class is to handle the creation of all the
+ * parameters related to these models (in DoInit()) and to dispatch the calls
+ * to the model-specific train functions in Train().
+ *
+ * This class is templated over scalar types for the input and output values,
+ * typically float or double.
+ *
+ * \ingroup CbDimensionalityReduction
+ */
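+//
+// Usage sketch (hypothetical deriving application; "io.out" is assumed to be an
+// output-filename parameter declared by that application, not by this class):
+//
+//   // in the deriving application's DoExecute(), once listSample is filled:
+//   this->Train(listSample, GetParameterString("io.out"));
+//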
+template <class TInputValue, class TOutputValue>
+class cbLearningApplicationBaseDR: public Application
+{
+public:
+  /** Standard class typedefs. */
+  typedef cbLearningApplicationBaseDR   Self;
+  typedef Application                   Superclass;
+  typedef itk::SmartPointer<Self>       Pointer;
+  typedef itk::SmartPointer<const Self> ConstPointer;
+
+  /** Standard macro */
+  itkTypeMacro(cbLearningApplicationBaseDR, otb::Application);
+
+  typedef TInputValue                          InputValueType;
+  typedef TOutputValue                         OutputValueType;
+
+  typedef otb::VectorImage<InputValueType>     SampleImageType;
+  typedef typename SampleImageType::PixelType  PixelType;
+
+  typedef otb::DimensionalityReductionModelFactory<
+              InputValueType, OutputValueType>                               ModelFactoryType;
+  typedef typename ModelFactoryType::DimensionalityReductionModelTypePointer ModelPointerType;
+  typedef typename ModelFactoryType::DimensionalityReductionModelType        ModelType;
+
+  typedef typename ModelType::InputSampleType     SampleType;
+  typedef typename ModelType::InputListSampleType ListSampleType;
+
+  // Dimensionality reduction models: SOM maps of dimension 2 to 5
+  typedef otb::SOMModel<InputValueType, 2> SOM2DModelType;
+  typedef otb::SOMModel<InputValueType, 3> SOM3DModelType;
+  typedef otb::SOMModel<InputValueType, 4> SOM4DModelType;
+  typedef otb::SOMModel<InputValueType, 5> SOM5DModelType;
+
+#ifdef OTB_USE_SHARK
+  typedef shark::LogisticNeuron NeuronType;
+  typedef otb::AutoencoderModel<InputValueType, NeuronType> AutoencoderModelType;
+  /*
+  typedef shark::TiedAutoencoder< shark::TanhNeuron, shark::TanhNeuron> TiedAutoencoderType;
+  typedef otb::AutoencoderModel<InputValueType, TiedAutoencoderType> TiedAutoencoderModelType;
+  */
+  typedef otb::PCAModel<InputValueType> PCAModelType;
+#endif
+  
+protected:
+  cbLearningApplicationBaseDR();
+
+  ~cbLearningApplicationBaseDR() ITK_OVERRIDE;
+
+  /** Generic method to train and save the dimensionality reduction model. This
+   * method dispatches to the specific train function of the chosen model. */
+  void Train(typename ListSampleType::Pointer trainingListSample,
+             std::string modelPath);
+
+  /** Generic method to load a model file and use it to reduce a sample list */
+  void Reduce(typename ListSampleType::Pointer validationListSample,
+              std::string modelPath);
+
+  /** Init method that creates all the parameters for dimensionality reduction models */
+  void DoInit();
+
+private:
+
+  /** Specific Init and Train methods for each machine learning model */
+  //@{
+  
+  void InitSOMParams();
+  template <class somchoice>
+  void TrainSOM(typename ListSampleType::Pointer trainingListSample, std::string modelPath);
+  void BeforeTrainSOM(typename ListSampleType::Pointer trainingListSample, std::string modelPath);
+
+#ifdef OTB_USE_SHARK
+  void InitAutoencoderParams();
+  void InitPCAParams();
+  
+  void BeforeTrainAutoencoder(typename ListSampleType::Pointer trainingListSample, std::string modelPath);
+  template <class autoencoderchoice>
+  void TrainAutoencoder(typename ListSampleType::Pointer trainingListSample, std::string modelPath);
+  
+  void TrainPCA(typename ListSampleType::Pointer trainingListSample, std::string modelPath);
+  
+ 
+#endif  
+  //@}
+};
+
+}
+}
+
+#ifndef OTB_MANUAL_INSTANTIATION
+#include "cbLearningApplicationBaseDR.txx"
+#include "cbTrainSOM.txx"
+
+#ifdef OTB_USE_SHARK
+#include "cbTrainAutoencoder.txx"
+#include "cbTrainPCA.txx"
+#endif
+#endif
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/cbLearningApplicationBaseDR.txx b/Modules/Learning/DimensionalityReduction/include/cbLearningApplicationBaseDR.txx
new file mode 100644
index 0000000000000000000000000000000000000000..19b5e4e414f98c1fb49d7d62a8f08dfc1946e8ab
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/cbLearningApplicationBaseDR.txx
@@ -0,0 +1,115 @@
+/*=========================================================================
+ Program:   ORFEO Toolbox
+ Language:  C++
+ Date:      $Date$
+ Version:   $Revision$
+
+
+ Copyright (c) Centre National d'Etudes Spatiales. All rights reserved.
+ See OTBCopyright.txt for details.
+
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notices for more information.
+
+ =========================================================================*/
+#ifndef cbLearningApplicationBaseDR_txx
+#define cbLearningApplicationBaseDR_txx
+
+#include "cbLearningApplicationBaseDR.h"
+
+namespace otb
+{
+namespace Wrapper
+{
+
+template <class TInputValue, class TOutputValue>
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::cbLearningApplicationBaseDR() 
+{
+}
+
+template <class TInputValue, class TOutputValue>
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::~cbLearningApplicationBaseDR()
+{
+  ModelFactoryType::CleanFactories();
+}
+
+template <class TInputValue, class TOutputValue>
+void
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::DoInit()
+{
+  AddDocTag(Tags::Learning);
+
+  // main choice parameter that will contain all dimensionality reduction options
+  AddParameter(ParameterType_Choice, "model", "model to use for the training");
+  SetParameterDescription("model", "Choice of the dimensionality reduction model to use for the training.");
+  
+
+  InitSOMParams();
+  
+#ifdef OTB_USE_SHARK
+  InitAutoencoderParams();
+  InitPCAParams();
+#endif
+  
+}
+
+template <class TInputValue, class TOutputValue>
+void
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::Reduce(typename ListSampleType::Pointer /*validationListSample*/, std::string /*modelPath*/)
+{
+  // Nothing is done here: the reduction step is handled by the deriving applications.
+}
+
+template <class TInputValue, class TOutputValue>
+void
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::Train(typename ListSampleType::Pointer trainingListSample,
+        std::string modelPath)
+{
+  // Get the name of the chosen dimensionality reduction model
+  const std::string modelName = GetParameterString("model");
+
+  // Call the model-specific train function
+  if (modelName == "som")
+    {
+    BeforeTrainSOM(trainingListSample, modelPath);
+    }
+
+  if (modelName == "autoencoder")
+    {
+#ifdef OTB_USE_SHARK
+    BeforeTrainAutoencoder(trainingListSample, modelPath);
+#else
+    otbAppLogFATAL("Module SharkLearning is not installed. You should consider turning OTB_USE_SHARK on during cmake configuration.");
+#endif
+    }
+  /*
+  if (modelName == "tiedautoencoder")
+    {
+#ifdef OTB_USE_SHARK
+    TrainAutoencoder<TiedAutoencoderModelType>(trainingListSample, modelPath);
+#else
+    otbAppLogFATAL("Module SharkLearning is not installed. You should consider turning OTB_USE_SHARK on during cmake configuration.");
+#endif
+    }
+  */
+  if (modelName == "pca")
+    {
+#ifdef OTB_USE_SHARK
+    TrainPCA(trainingListSample, modelPath);
+#else
+    otbAppLogFATAL("Module SharkLearning is not installed. You should consider turning OTB_USE_SHARK on during cmake configuration.");
+#endif
+    }
+}
+
+}
+}
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/cbTrainAutoencoder.txx b/Modules/Learning/DimensionalityReduction/include/cbTrainAutoencoder.txx
new file mode 100644
index 0000000000000000000000000000000000000000..0da00f85eade8f7fc758b600acdefadda197c807
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/cbTrainAutoencoder.txx
@@ -0,0 +1,189 @@
+
+#ifndef cbTrainAutoencoder_txx
+#define cbTrainAutoencoder_txx
+
+#include "cbLearningApplicationBaseDR.h"
+
+namespace otb
+{
+namespace Wrapper
+{
+
+template <class TInputValue, class TOutputValue>
+void
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::InitAutoencoderParams()
+{
+
+
+  AddChoice("model.tiedautoencoder", "Shark Tied Autoencoder");
+  AddChoice("model.autoencoder", "Shark Autoencoder");
+  SetParameterDescription("model.autoencoder",
+                          "This group of parameters allows setting Shark autoencoder parameters. "
+                          );
+                          
+  
+  //Tied Autoencoder
+  AddParameter(ParameterType_Choice, "model.autoencoder.istied",
+               "tied weighth <tied/untied>");
+  SetParameterDescription(
+    "model.autoencoder.istied",
+    "Parameter that determine if the weights are tied or not <tied/untied>");
+                          
+        
+  AddChoice("model.autoencoder.istied.yes","Tied weigths");
+  AddChoice("model.autoencoder.istied.no","Untied weights");
+                  
+                          
+                          
+  //Number Of Iterations
+  AddParameter(ParameterType_Int, "model.autoencoder.nbiter",
+               "Maximum number of iterations during training");
+  SetParameterInt("model.autoencoder.nbiter",100, false);
+  SetParameterDescription(
+    "model.autoencoder.nbiter",
+    "The maximum number of iterations used during training.");
+   
+  AddParameter(ParameterType_Int, "model.autoencoder.nbiterfinetuning",
+               "Maximum number of iterations during training");
+  SetParameterInt("model.autoencoder.nbiterfinetuning",0, false);
+  SetParameterDescription(
+    "model.autoencoder.nbiterfinetuning",
+    "The maximum number of iterations used during fine tuning of the whole network.");
+  
+  AddParameter(ParameterType_Float, "model.autoencoder.epsilon",
+               " ");
+  SetParameterFloat("model.autoencoder.epsilon",0, false);
+  SetParameterDescription(
+    "model.autoencoder.epsilon",
+    " ");
+  
+  
+  AddParameter(ParameterType_Float, "model.autoencoder.initfactor",
+               " ");
+  SetParameterFloat("model.autoencoder.initfactor",1, false);
+  SetParameterDescription(
+    "model.autoencoder.initfactor", "parameter that control the weight initialization of the autoencoder");
+  
+  //Number Of Hidden Neurons
+  AddParameter(ParameterType_StringList, "model.autoencoder.nbneuron", "Number of neurons in each hidden layer");
+  SetParameterDescription(
+    "model.autoencoder.nbneuron",
+    "The number of neurons in each hidden layer.");
+  
+  //Regularization
+  AddParameter(ParameterType_StringList, "model.autoencoder.regularization", "Strength of the regularization");
+  SetParameterDescription("model.autoencoder.regularization", 
+                         "Strength of the L2 regularization used during training");
+                         
+  //Noise strength
+  AddParameter(ParameterType_StringList, "model.autoencoder.noise", "Strength of the noise");
+  SetParameterDescription("model.autoencoder.noise", 
+                         "Strength of the noise");
+  
+  // Sparsity parameter
+  AddParameter(ParameterType_StringList, "model.autoencoder.rho", "Sparsity parameter");
+  SetParameterDescription("model.autoencoder.rho", 
+                         "Sparsity parameter");
+  
+  // Sparsity regularization strength
+  AddParameter(ParameterType_StringList, "model.autoencoder.beta", "Sparsity regularization strength");
+  SetParameterDescription("model.autoencoder.beta", 
+                         "Sparsity regularization strength");
+                         
+  AddParameter(ParameterType_OutputFilename, "model.autoencoder.learningcurve", "Learning curve");
+  SetParameterDescription("model.autoencoder.learningcurve", "Learning error values");
+  MandatoryOff("model.autoencoder.learningcurve");
+		
+}
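+// Example invocation (sketch) of the autoencoder parameters declared above, for a
+// hypothetical application deriving this class (one hidden layer of 50 neurons):
+//   -model autoencoder -model.autoencoder.istied no -model.autoencoder.nbiter 100
+//   -model.autoencoder.nbneuron 50 -model.autoencoder.regularization 0.01
+//   -model.autoencoder.noise 0.1 -model.autoencoder.rho 0.1 -model.autoencoder.beta 1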
+
+
+template <class TInputValue, class TOutputValue>
+void
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::BeforeTrainAutoencoder(typename ListSampleType::Pointer trainingListSample,
+        std::string modelPath)
+{
+  std::string tiedWeights = GetParameterString("model.autoencoder.istied");
+
+  if (tiedWeights == "no")
+    {
+    TrainAutoencoder<AutoencoderModelType>(trainingListSample, modelPath);
+    }
+  /*
+  if (tiedWeights == "yes")
+    {
+    TrainAutoencoder<TiedAutoencoderModelType>(trainingListSample, modelPath);
+    }
+  */
+  if (tiedWeights != "yes" && tiedWeights != "no")
+    {
+    otbAppLogFATAL("istied : invalid choice <yes/no>");
+    }
+}
+
+
+
+template <class TInputValue, class TOutputValue>
+template <typename autoencoderchoice>
+void cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::TrainAutoencoder(typename ListSampleType::Pointer trainingListSample,std::string modelPath)
+{
+  typename autoencoderchoice::Pointer dimredTrainer = autoencoderchoice::New();
+  itk::Array<unsigned int> nb_neuron;
+  itk::Array<float> noise;
+  itk::Array<float> regularization;
+  itk::Array<float> rho;
+  itk::Array<float> beta;
+
+  // The per-layer parameters are given as string lists of the same length
+  std::vector<std::string> s_nbneuron = GetParameterStringList("model.autoencoder.nbneuron");
+  std::vector<std::string> s_noise = GetParameterStringList("model.autoencoder.noise");
+  std::vector<std::string> s_regularization = GetParameterStringList("model.autoencoder.regularization");
+  std::vector<std::string> s_rho = GetParameterStringList("model.autoencoder.rho");
+  std::vector<std::string> s_beta = GetParameterStringList("model.autoencoder.beta");
+
+  nb_neuron.SetSize(s_nbneuron.size());
+  noise.SetSize(s_nbneuron.size());
+  regularization.SetSize(s_nbneuron.size());
+  rho.SetSize(s_nbneuron.size());
+  beta.SetSize(s_nbneuron.size());
+
+  for (unsigned int i = 0; i < s_nbneuron.size(); i++)
+    {
+    nb_neuron[i] = std::stoi(s_nbneuron[i]);
+    noise[i] = std::stof(s_noise[i]);
+    regularization[i] = std::stof(s_regularization[i]);
+    rho[i] = std::stof(s_rho[i]);
+    beta[i] = std::stof(s_beta[i]);
+    }
+
+  dimredTrainer->SetNumberOfHiddenNeurons(nb_neuron);
+  dimredTrainer->SetNumberOfIterations(GetParameterInt("model.autoencoder.nbiter"));
+  dimredTrainer->SetNumberOfIterationsFineTuning(GetParameterInt("model.autoencoder.nbiterfinetuning"));
+  dimredTrainer->SetEpsilon(GetParameterFloat("model.autoencoder.epsilon"));
+  dimredTrainer->SetInitFactor(GetParameterFloat("model.autoencoder.initfactor"));
+  dimredTrainer->SetRegularization(regularization);
+  dimredTrainer->SetNoise(noise);
+  dimredTrainer->SetRho(rho);
+  dimredTrainer->SetBeta(beta);
+  dimredTrainer->SetWriteWeights(true);
+
+  if (HasValue("model.autoencoder.learningcurve") && IsParameterEnabled("model.autoencoder.learningcurve"))
+    {
+    dimredTrainer->SetWriteLearningCurve(true);
+    dimredTrainer->SetLearningCurveFileName(GetParameterString("model.autoencoder.learningcurve"));
+    }
+
+  dimredTrainer->SetInputListSample(trainingListSample);
+  dimredTrainer->Train();
+  dimredTrainer->Save(modelPath);
+}
+
+} //end namespace wrapper
+} //end namespace otb
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/cbTrainPCA.txx b/Modules/Learning/DimensionalityReduction/include/cbTrainPCA.txx
new file mode 100644
index 0000000000000000000000000000000000000000..d51926469b1eadea2f01edc6faacc617e6c41770
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/cbTrainPCA.txx
@@ -0,0 +1,51 @@
+
+#ifndef cbTrainPCA_txx
+#define cbTrainPCA_txx
+
+#include "cbLearningApplicationBaseDR.h"
+
+namespace otb
+{
+namespace Wrapper
+{
+
+template <class TInputValue, class TOutputValue>
+void
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::InitPCAParams()
+{
+
+
+  AddChoice("model.pca", "Shark PCA");
+  SetParameterDescription("model.pca",
+                          "This group of parameters allows setting Shark PCA parameters. "
+                          );
+  
+  
+   //Output Dimension
+  AddParameter(ParameterType_Int, "model.pca.dim",
+               "Dimension of the output of the pca transformation");
+  SetParameterInt("model.pca.dim",10, false);
+  SetParameterDescription(
+    "model.pca.dim",
+    "Dimension of the output of the pca transformation.");
+  
+ 
+}
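+// Example invocation (sketch) of the PCA parameters declared above, for a
+// hypothetical application deriving this class:
+//   -model pca -model.pca.dim 8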
+
+template <class TInputValue, class TOutputValue>
+void cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::TrainPCA(typename ListSampleType::Pointer trainingListSample,std::string modelPath)
+{
+  typename PCAModelType::Pointer dimredTrainer = PCAModelType::New();
+  dimredTrainer->SetDimension(GetParameterInt("model.pca.dim"));
+  dimredTrainer->SetInputListSample(trainingListSample);
+  dimredTrainer->SetWriteEigenvectors(true);
+  dimredTrainer->Train();
+  dimredTrainer->Save(modelPath);
+}
+
+} //end namespace wrapper
+} //end namespace otb
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/include/cbTrainSOM.txx b/Modules/Learning/DimensionalityReduction/include/cbTrainSOM.txx
new file mode 100644
index 0000000000000000000000000000000000000000..4c6f5c89f14c76c552ae13c808ada8a818d18323
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/include/cbTrainSOM.txx
@@ -0,0 +1,147 @@
+
+#ifndef cbTrainSOM_txx
+#define cbTrainSOM_txx
+#include "cbLearningApplicationBaseDR.h"
+
+namespace otb
+{
+namespace Wrapper
+{
+
+template <class TInputValue, class TOutputValue>
+void
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::InitSOMParams()
+{
+
+  AddChoice("model.som", "OTB SOM");
+  SetParameterDescription("model.som",
+                          "This group of parameters allows setting SOM parameters. "
+                          );
+
+  AddParameter(ParameterType_Int, "model.som.dim", "Dimension of the map");
+  SetParameterDescription("model.som.dim", "Dimension of the SOM map.");
+
+  AddParameter(ParameterType_StringList, "model.som.s", "Size");
+  SetParameterDescription("model.som.s", "Size of the SOM map (one value per map dimension)");
+  MandatoryOff("model.som.s");
+
+  AddParameter(ParameterType_StringList, "model.som.n", "Size Neighborhood");
+  SetParameterDescription("model.som.n", "Size of the initial neighborhood in the SOM map (one value per map dimension)");
+  MandatoryOff("model.som.n");
+
+  AddParameter(ParameterType_Int, "model.som.sx", "SizeX");
+  SetParameterDescription("model.som.sx", "X size of the SOM map");
+  MandatoryOff("model.som.sx");
+
+  AddParameter(ParameterType_Int, "model.som.sy", "SizeY");
+  SetParameterDescription("model.som.sy", "Y size of the SOM map");
+  MandatoryOff("model.som.sy");
+
+  AddParameter(ParameterType_Int, "model.som.nx", "NeighborhoodX");
+  SetParameterDescription("model.som.nx", "X size of the initial neighborhood in the SOM map");
+  MandatoryOff("model.som.nx");
+
+  AddParameter(ParameterType_Int, "model.som.ny", "NeighborhoodY");
+  SetParameterDescription("model.som.ny", "Y size of the initial neighborhood in the SOM map");
+  MandatoryOff("model.som.ny");
+
+  AddParameter(ParameterType_Int, "model.som.ni", "NumberIteration");
+  SetParameterDescription("model.som.ni", "Number of iterations for SOM learning");
+  MandatoryOff("model.som.ni");
+
+  AddParameter(ParameterType_Float, "model.som.bi", "BetaInit");
+  SetParameterDescription("model.som.bi", "Initial learning coefficient");
+  MandatoryOff("model.som.bi");
+
+  AddParameter(ParameterType_Float, "model.som.bf", "BetaFinal");
+  SetParameterDescription("model.som.bf", "Final learning coefficient");
+  MandatoryOff("model.som.bf");
+
+  AddParameter(ParameterType_Float, "model.som.iv", "InitialValue");
+  SetParameterDescription("model.som.iv", "Maximum initial neuron weight");
+  MandatoryOff("model.som.iv");
+
+  SetDefaultParameterInt("model.som.sx", 32);
+  SetDefaultParameterInt("model.som.sy", 32);
+  SetDefaultParameterInt("model.som.nx", 10);
+  SetDefaultParameterInt("model.som.ny", 10);
+  SetDefaultParameterInt("model.som.ni", 5);
+  SetDefaultParameterFloat("model.som.bi", 1.0);
+  SetDefaultParameterFloat("model.som.bf", 0.1);
+  SetDefaultParameterFloat("model.som.iv", 10.0);
+ 
+}
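+// Example invocation (sketch) of the SOM parameters declared above, for a
+// hypothetical application deriving this class (a 2D map of 32x32 neurons):
+//   -model som -model.som.dim 2 -model.som.s 32 32 -model.som.n 10 10 -model.som.ni 5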
+
+template <class TInputValue, class TOutputValue>
+void
+cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::BeforeTrainSOM(typename ListSampleType::Pointer trainingListSample,
+        std::string modelPath)
+{
+  int SomDim = GetParameterInt("model.som.dim");
+
+  if (SomDim == 2)
+    {
+    TrainSOM<SOM2DModelType>(trainingListSample, modelPath);
+    }
+
+  if (SomDim == 3)
+    {
+    TrainSOM<SOM3DModelType>(trainingListSample, modelPath);
+    }
+
+  if (SomDim == 4)
+    {
+    TrainSOM<SOM4DModelType>(trainingListSample, modelPath);
+    }
+
+  if (SomDim == 5)
+    {
+    TrainSOM<SOM5DModelType>(trainingListSample, modelPath);
+    }
+
+  if (SomDim > 5 || SomDim < 2)
+    {
+    otbAppLogFATAL("model.som.dim : invalid map dimension (expected a value between 2 and 5)");
+    }
+}
+
+
+template <class TInputValue, class TOutputValue>
+template <typename somchoice>
+void cbLearningApplicationBaseDR<TInputValue,TOutputValue>
+::TrainSOM(typename ListSampleType::Pointer trainingListSample,std::string modelPath)
+{
+  using TemplateEstimatorType = typename somchoice::EstimatorType;
+  typename somchoice::Pointer dimredTrainer = somchoice::New();
+  unsigned int dim = dimredTrainer->GetDimension();
+
+  dimredTrainer->SetNumberOfIterations(GetParameterInt("model.som.ni"));
+  dimredTrainer->SetBetaInit(GetParameterFloat("model.som.bi"));
+  dimredTrainer->SetWriteMap(true);
+  dimredTrainer->SetBetaEnd(GetParameterFloat("model.som.bf"));
+  dimredTrainer->SetMaxWeight(GetParameterFloat("model.som.iv"));
+
+  // Map size: one value per map dimension, read from the "model.som.s" string list
+  typename TemplateEstimatorType::SizeType size;
+  std::vector<std::string> s = GetParameterStringList("model.som.s");
+  for (unsigned int i = 0; i < dim; i++)
+    {
+    size[i] = std::stoi(s[i]);
+    }
+  dimredTrainer->SetMapSize(size);
+
+  // Initial neighborhood size: one value per map dimension, from "model.som.n"
+  typename TemplateEstimatorType::SizeType radius;
+  std::vector<std::string> n = GetParameterStringList("model.som.n");
+  for (unsigned int i = 0; i < dim; i++)
+    {
+    radius[i] = std::stoi(n[i]);
+    }
+  dimredTrainer->SetNeighborhoodSizeInit(radius);
+
+  dimredTrainer->SetListSample(trainingListSample);
+  dimredTrainer->Train();
+  dimredTrainer->Save(modelPath);
+}
+
+} //end namespace wrapper
+} //end namespace otb
+
+#endif
diff --git a/Modules/Learning/DimensionalityReduction/otb-module.cmake b/Modules/Learning/DimensionalityReduction/otb-module.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..258fdf1c4476abcf4626b043c0c9c8c78db847c8
--- /dev/null
+++ b/Modules/Learning/DimensionalityReduction/otb-module.cmake
@@ -0,0 +1,14 @@
+set(DOCUMENTATION "Dimensionality reduction application")
+otb_module(CbDimensionalityReduction
+	DEPENDS
+		OTBCommon
+		OTBApplicationEngine
+		OTBITK
+		OTBShark	
+		OTBBoost
+		OTBAppClassification
+		OTBSOM
+		OTBLearningBase
+	DESCRIPTION 
+		"${DOCUMENTATION}"
+)