From e1b491e659be00d1ebf6e84f1a5274d12f944c6c Mon Sep 17 00:00:00 2001 From: Guillaume Pasero <guillaume.pasero@c-s.fr> Date: Fri, 8 Dec 2017 18:24:53 +0100 Subject: [PATCH] STYLE: clean code, use OTB style --- .../app/otbImageDimensionalityReduction.cxx | 58 +- .../app/otbTrainDimensionalityReduction.cxx | 63 +- .../app/otbVectorDimensionalityReduction.cxx | 680 +++++++++--------- ...imensionalityReductionTrainAutoencoder.txx | 165 ++--- .../otbDimensionalityReductionTrainPCA.txx | 19 +- .../otbDimensionalityReductionTrainSOM.txx | 122 ++-- ...inDimensionalityReductionApplicationBase.h | 49 +- ...DimensionalityReductionApplicationBase.txx | 49 +- .../include/otbPCAModel.h | 115 +-- .../include/otbPCAModel.txx | 205 +++--- .../include/otbPCAModelFactory.h | 12 +- .../include/otbPCAModelFactory.txx | 14 +- .../include/otbSOMModel.h | 220 +++--- .../include/otbSOMModel.txx | 318 ++++---- .../include/otbSOMModelFactory.h | 11 +- .../include/otbSOMModelFactory.txx | 14 +- 16 files changed, 1026 insertions(+), 1088 deletions(-) diff --git a/Modules/Applications/AppDimensionalityReduction/app/otbImageDimensionalityReduction.cxx b/Modules/Applications/AppDimensionalityReduction/app/otbImageDimensionalityReduction.cxx index 7ac9ea1d11..e628b951a7 100644 --- a/Modules/Applications/AppDimensionalityReduction/app/otbImageDimensionalityReduction.cxx +++ b/Modules/Applications/AppDimensionalityReduction/app/otbImageDimensionalityReduction.cxx @@ -68,11 +68,15 @@ private: InternalType m_B; }; -} +} // end of namespace Functor namespace Wrapper { - +/** + * \class ImageDimensionalityReduction + * + * Apply a dimensionality reduction model to an image + */ class ImageDimensionalityReduction : public Application { public: @@ -88,21 +92,27 @@ public: itkTypeMacro(ImageDimensionalityReduction, otb::Application); /** Filters typedef */ - typedef UInt8ImageType MaskImageType; - typedef itk::VariableLengthVector<FloatVectorImageType::InternalPixelType> MeasurementType; - typedef otb::StatisticsXMLFileReader<MeasurementType> StatisticsReader; - typedef otb::ShiftScaleVectorImageFilter<FloatVectorImageType, FloatVectorImageType> RescalerType; + typedef UInt8ImageType MaskImageType; + typedef itk::VariableLengthVector< + FloatVectorImageType::InternalPixelType> MeasurementType; + typedef otb::StatisticsXMLFileReader<MeasurementType> StatisticsReader; + typedef otb::ShiftScaleVectorImageFilter< + FloatVectorImageType, FloatVectorImageType> RescalerType; typedef itk::UnaryFunctorImageFilter< FloatImageType, FloatImageType, - otb::Functor::AffineFunctor<float,float> > OutputRescalerType; - typedef otb::ImageDimensionalityReductionFilter<FloatVectorImageType, FloatVectorImageType, MaskImageType> DimensionalityReductionFilterType; - typedef DimensionalityReductionFilterType::Pointer DimensionalityReductionFilterPointerType; - typedef DimensionalityReductionFilterType::ModelType ModelType; - typedef ModelType::Pointer ModelPointerType; - typedef DimensionalityReductionFilterType::ValueType ValueType; - typedef DimensionalityReductionFilterType::LabelType LabelType; - typedef otb::DimensionalityReductionModelFactory<ValueType, LabelType> DimensionalityReductionModelFactoryType; + otb::Functor::AffineFunctor<float,float> > OutputRescalerType; + typedef otb::ImageDimensionalityReductionFilter< + FloatVectorImageType, + FloatVectorImageType, + MaskImageType> DimensionalityReductionFilterType; + typedef DimensionalityReductionFilterType::Pointer DimensionalityReductionFilterPointerType; + typedef 
DimensionalityReductionFilterType::ModelType ModelType; + typedef ModelType::Pointer ModelPointerType; + typedef DimensionalityReductionFilterType::ValueType ValueType; + typedef DimensionalityReductionFilterType::LabelType LabelType; + typedef otb::DimensionalityReductionModelFactory< + ValueType, LabelType> DimensionalityReductionModelFactoryType; protected: @@ -115,7 +125,8 @@ private: void DoInit() ITK_OVERRIDE { SetName("DimensionalityReduction"); - SetDescription("Performs dimensionality reduction of the input image according to a dimensionality reduction model file."); + SetDescription("Performs dimensionality reduction of the input image " + "according to a dimensionality reduction model file."); // Documentation SetDocName("DimensionalityReduction"); @@ -185,12 +196,14 @@ private: // Load DR model using a factory otbAppLogINFO("Loading model"); - m_Model = DimensionalityReductionModelFactoryType::CreateDimensionalityReductionModel(GetParameterString("model"), - DimensionalityReductionModelFactoryType::ReadMode); + m_Model = DimensionalityReductionModelFactoryType::CreateDimensionalityReductionModel( + GetParameterString("model"), + DimensionalityReductionModelFactoryType::ReadMode); if (m_Model.IsNull()) { - otbAppLogFATAL(<< "Error when loading model " << GetParameterString("model") << " : unsupported model type"); + otbAppLogFATAL(<< "Error when loading model " << GetParameterString("model") + << " : unsupported model type"); } m_Model->Load(GetParameterString("model")); @@ -203,7 +216,7 @@ private: FloatVectorImageType::Pointer outputImage = m_ClassificationFilter->GetOutput(); // Normalize input image if asked - if(IsParameterEnabled("imstat") ) + if( IsParameterEnabled("imstat") ) { otbAppLogINFO("Input image normalization activated."); // Normalize input image (optional) @@ -236,7 +249,6 @@ private: m_ClassificationFilter->SetInput(inImage); } - if(IsParameterEnabled("mask")) { otbAppLogINFO("Using input mask"); @@ -247,7 +259,6 @@ private: } SetParameterOutputImage<FloatVectorImageType>("out", outputImage); - } DimensionalityReductionFilterType::Pointer m_ClassificationFilter; @@ -256,8 +267,7 @@ private: OutputRescalerType::Pointer m_OutRescaler; }; - -} -} +} // end of namespace Wrapper +} // end of namespace otb OTB_APPLICATION_EXPORT(otb::Wrapper::ImageDimensionalityReduction) diff --git a/Modules/Applications/AppDimensionalityReduction/app/otbTrainDimensionalityReduction.cxx b/Modules/Applications/AppDimensionalityReduction/app/otbTrainDimensionalityReduction.cxx index 8b378f4f00..fa3dd7bdcb 100644 --- a/Modules/Applications/AppDimensionalityReduction/app/otbTrainDimensionalityReduction.cxx +++ b/Modules/Applications/AppDimensionalityReduction/app/otbTrainDimensionalityReduction.cxx @@ -28,18 +28,21 @@ #include "otbShiftScaleSampleListFilter.h" #include "otbStatisticsXMLFileReader.h" -//#include "otbSharkUtils.h" - #include <fstream> // write the model file #include "otbDimensionalityReductionModelFactory.h" #include "otbTrainDimensionalityReductionApplicationBase.h" - namespace otb { namespace Wrapper { + +/** + * \class TrainDimensionalityReduction + * + * Training of a dimensionality reduction model + */ class TrainDimensionalityReduction : public TrainDimensionalityReductionApplicationBase<float,float> { public: @@ -47,62 +50,64 @@ public: typedef TrainDimensionalityReductionApplicationBase<float, float> Superclass; typedef itk::SmartPointer<Self> Pointer; typedef itk::SmartPointer<const Self> ConstPointer; - + itkNewMacro(Self); 
itkTypeMacro(TrainDimensionalityReduction, otb::Application); - typedef Superclass::SampleType SampleType; typedef Superclass::ListSampleType ListSampleType; typedef Superclass::SampleImageType SampleImageType; - + typedef float ValueType; typedef itk::VariableLengthVector<ValueType> MeasurementType; typedef otb::StatisticsXMLFileReader<SampleType> StatisticsReader; typedef otb::Statistics::ShiftScaleSampleListFilter<ListSampleType, ListSampleType> ShiftScaleFilterType; - + typedef otb::DimensionalityReductionModelFactory<ValueType, ValueType> ModelFactoryType; - + private: void DoInit() { SetName("TrainDimensionalityReduction"); - SetDescription("Trainer for the dimensionality reduction algorithms used in the ImageDimensionalityReduction and VectorDimensionalityReduction applications."); - + SetDescription("Trainer for the dimensionality reduction algorithms used in" + " the ImageDimensionalityReduction and VectorDimensionalityReduction applications."); + AddParameter(ParameterType_Group, "io", "Input and output data"); SetParameterDescription("io", "This group of parameters allows setting input and output data."); AddParameter(ParameterType_InputVectorData, "io.vd", "Input Vector Data"); - SetParameterDescription("io.vd", "Input geometries used for training (note : all geometries from the layer will be used)"); + SetParameterDescription("io.vd", "Input geometries used for training (note " + ": all geometries from the layer will be used)"); AddParameter(ParameterType_OutputFilename, "io.out", "Output model"); SetParameterDescription("io.out", "Output file containing the estimated model (.txt format)."); - - + + AddParameter(ParameterType_InputFilename, "io.stats", "Input XML image statistics file"); MandatoryOff("io.stats"); SetParameterDescription("io.stats", "XML file containing mean and variance of each feature."); AddParameter(ParameterType_StringList, "feat", "Field names to be used for training."); // - SetParameterDescription("feat","List of field names in the input vector data used as features for training."); // - + SetParameterDescription("feat","List of field names in the input vector data" + " used as features for training."); // + Superclass::DoInit(); AddRAMParameter(); } - + void DoUpdateParameters() { } - - void DoExecute() - { + void DoExecute() + { std::string shapefile = GetParameterString("io.vd"); - otb::ogr::DataSource::Pointer source = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read); + otb::ogr::DataSource::Pointer source = + otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read); otb::ogr::Layer layer = source->GetLayer(0); ListSampleType::Pointer input = ListSampleType::New(); const int nbFeatures = GetParameterStringList("feat").size(); @@ -120,10 +125,10 @@ private: } input->PushBack(mv); } - + MeasurementType meanMeasurementVector; MeasurementType stddevMeasurementVector; - + if (HasValue("io.stats") && IsParameterEnabled("io.stats")) { StatisticsReader::Pointer statisticsReader = StatisticsReader::New(); @@ -139,7 +144,7 @@ private: stddevMeasurementVector.SetSize(nbFeatures); stddevMeasurementVector.Fill(1.); } - + ShiftScaleFilterType::Pointer trainingShiftScaleFilter = ShiftScaleFilterType::New(); trainingShiftScaleFilter->SetInput(input); trainingShiftScaleFilter->SetShifts(meanMeasurementVector); @@ -147,19 +152,13 @@ private: trainingShiftScaleFilter->Update(); ListSampleType::Pointer trainingListSample= trainingShiftScaleFilter->GetOutput(); - - + this->Train(trainingListSample,GetParameterString("io.out")); } - 
- - - }; - -} -} +} // end of namespace Wrapper +} // end of namespace otb OTB_APPLICATION_EXPORT(otb::Wrapper::TrainDimensionalityReduction) diff --git a/Modules/Applications/AppDimensionalityReduction/app/otbVectorDimensionalityReduction.cxx b/Modules/Applications/AppDimensionalityReduction/app/otbVectorDimensionalityReduction.cxx index b755ddeeba..dd4e89097b 100644 --- a/Modules/Applications/AppDimensionalityReduction/app/otbVectorDimensionalityReduction.cxx +++ b/Modules/Applications/AppDimensionalityReduction/app/otbVectorDimensionalityReduction.cxx @@ -32,356 +32,356 @@ namespace otb { namespace Wrapper { - + /** Utility function to negate std::isalnum */ bool IsNotAlphaNum(char c) { return !std::isalnum(c); } +/** + * \class VectorDimensionalityReduction + * + * Apply a dimensionality reduction model on a vector file + */ class VectorDimensionalityReduction : public Application { - public: - - /** Standard class typedefs. */ +public: + /** Standard class typedefs. */ typedef VectorDimensionalityReduction Self; typedef Application Superclass; - typedef itk::SmartPointer<Self> Pointer; - typedef itk::SmartPointer<const Self> ConstPointer; - - /** Standard macro */ - itkNewMacro(Self); - itkTypeMacro(Self, Application) - - /** Filters typedef */ - - typedef float ValueType; - typedef itk::VariableLengthVector<ValueType> InputSampleType; - typedef itk::Statistics::ListSample<InputSampleType> ListSampleType; - typedef MachineLearningModel<itk::VariableLengthVector<ValueType>, itk::VariableLengthVector<ValueType>> DimensionalityReductionModelType; - typedef DimensionalityReductionModelFactory<ValueType,ValueType> DimensionalityReductionModelFactoryType; - typedef DimensionalityReductionModelType::Pointer ModelPointerType; - - /** Statistics Filters typedef */ - typedef itk::VariableLengthVector<ValueType> MeasurementType; - typedef otb::StatisticsXMLFileReader<MeasurementType> StatisticsReader; - typedef otb::Statistics::ShiftScaleSampleListFilter<ListSampleType, ListSampleType> ShiftScaleFilterType; + typedef itk::SmartPointer<Self> Pointer; + typedef itk::SmartPointer<const Self> ConstPointer; + + /** Standard macro */ + itkNewMacro(Self); + itkTypeMacro(Self, Application) + + /** Filters typedef */ + typedef float ValueType; + typedef itk::VariableLengthVector<ValueType> InputSampleType; + typedef itk::Statistics::ListSample<InputSampleType> ListSampleType; + typedef MachineLearningModel< + itk::VariableLengthVector<ValueType>, + itk::VariableLengthVector<ValueType> > DimensionalityReductionModelType; + typedef DimensionalityReductionModelFactory< + ValueType,ValueType> DimensionalityReductionModelFactoryType; + typedef DimensionalityReductionModelType::Pointer ModelPointerType; + + /** Statistics Filters typedef */ + typedef itk::VariableLengthVector<ValueType> MeasurementType; + typedef otb::StatisticsXMLFileReader<MeasurementType> StatisticsReader; + typedef otb::Statistics::ShiftScaleSampleListFilter< + ListSampleType, ListSampleType> ShiftScaleFilterType; + +protected: ~VectorDimensionalityReduction() ITK_OVERRIDE - { - DimensionalityReductionModelFactoryType::CleanFactories(); - } - - private: - - void DoInit() ITK_OVERRIDE - { - SetName("VectorDimensionalityReduction"); - SetDescription("Performs dimensionality reduction of the input vector data according to a model file."); - SetDocName("Vector Dimensionality Reduction"); - SetDocAuthors("OTB-Team"); - SetDocLongDescription("This application performs a vector data dimensionality reduction based on a model file produced by the 
TrainDimensionalityReduction application."); - SetDocSeeAlso("TrainDimensionalityReduction"); - AddDocTag(Tags::Learning); - - AddParameter(ParameterType_InputVectorData, "in", "Name of the input vector data"); - SetParameterDescription("in","The input vector data to reduce."); - - AddParameter(ParameterType_InputFilename, "instat", "Statistics file"); - SetParameterDescription("instat", "A XML file containing mean and standard deviation to center" - "and reduce samples before dimensionality reduction (produced by ComputeImagesStatistics application)."); - MandatoryOff("instat"); - - AddParameter(ParameterType_InputFilename, "model", "Model file"); - SetParameterDescription("model", "A model file (produced by the TrainDimensionalityReduction application,"); - - AddParameter(ParameterType_ListView, "feat", "Input features to use for reduction."); // - SetParameterDescription("feat","List of field names in the input vector data used as features for reduction."); // - - AddParameter(ParameterType_StringList, "featout", "Names of the new output features."); // - SetParameterDescription("featout","List of field names for the output features which result from the reduction."); // - - AddParameter(ParameterType_OutputFilename, "out", "Output vector data file containing the reduced vector"); - SetParameterDescription("out","Output vector data file storing sample values (OGR format)." - "If not given, the input vector data file is used. In overwrite mode, the original features will be lost."); - MandatoryOff("out"); - - AddParameter(ParameterType_Int, "indim", "Dimension of the input vector"); - SetParameterDescription("indim","Dimension of the whole input vector, this value is required if only a part of the bands contained in the vector are used." - "If not given, the dimension is deduced from the length of the 'feat' parameter"); - MandatoryOff("indim"); - - AddParameter(ParameterType_Int, "pcadim", "Principal component"); // - SetParameterDescription("pcadim","This optional parameter can be set to reduce the number of eignevectors used in the PCA model file."); // - MandatoryOff("pcadim"); - - AddParameter(ParameterType_String, "mode", "Writting mode"); // - SetParameterString("mode","overwrite", false); - SetParameterDescription("mode","This parameter determines if the output file is overwritten or updated [overwrite/update]. 
If an output file name is given, the original file is copied before creating the new features."); // - - - // Doc example parameter settings - SetDocExampleParameterValue("in", "vectorData.shp"); - SetDocExampleParameterValue("instat", "meanVar.xml"); - SetDocExampleParameterValue("model", "model.txt"); - SetDocExampleParameterValue("out", "vectorDataOut.shp"); - SetDocExampleParameterValue("feat", "perimeter area width"); - SetDocExampleParameterValue("featout", "perimeter area width"); - //SetOfficialDocLink(); - } - // - void DoUpdateParameters() ITK_OVERRIDE - { - - if ( HasValue("in") ) - { - - std::string shapefile = GetParameterString("in"); - otb::ogr::DataSource::Pointer ogrDS; - OGRSpatialReference oSRS(""); - std::vector<std::string> options; - ogrDS = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read); - otb::ogr::Layer layer = ogrDS->GetLayer(0); - OGRFeatureDefn &layerDefn = layer.GetLayerDefn(); - ClearChoices("feat"); - //ClearChoices("featout"); - - for(int iField=0; iField< layerDefn.GetFieldCount(); iField++) - { - std::string item = layerDefn.GetFieldDefn(iField)->GetNameRef(); - std::string key(item); - std::string::iterator end = std::remove_if( key.begin(), key.end(), IsNotAlphaNum ); - std::transform( key.begin(), end, key.begin(), tolower ); - /* - key.erase( std::remove_if(key.begin(),key.end(),IsNotAlphaNum), key.end()); - std::transform(key.begin(), key.end(), key.begin(), tolower);*/ - //OGRFieldType fieldType = layerDefn.GetFieldDefn(iField)->GetType(); - /* if(fieldType == OFTInteger || ogr::version_proxy::IsOFTInteger64(fieldType) || fieldType == OFTReal) - {*/ - //std::string tmpKey="feat."+key; - std::string tmpKey = "feat." + key.substr( 0, static_cast<unsigned long>( end - key.begin() ) ); - AddChoice(tmpKey,item); - //} // this is the same as in otbVectorClassifier, but it doesnt work - } - - } - - } - - void DoExecute() ITK_OVERRIDE - { - clock_t tic = clock(); - - - std::string shapefile = GetParameterString("in"); - otb::ogr::DataSource::Pointer source = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read); - otb::ogr::Layer layer = source->GetLayer(0); - ListSampleType::Pointer input = ListSampleType::New(); - int nbFeatures = GetSelectedItems("feat").size(); - - input->SetMeasurementVectorSize(nbFeatures); - otb::ogr::Layer::const_iterator it = layer.cbegin(); - otb::ogr::Layer::const_iterator itEnd = layer.cend(); - - for( ; it!=itEnd ; ++it) - { - MeasurementType mv; - mv.SetSize(nbFeatures); - - for(int idx=0; idx < nbFeatures; ++idx) - { - mv[idx] = static_cast<float>( (*it)[GetSelectedItems("feat")[idx]].GetValue<double>() ); - - } - input->PushBack(mv); - - } - - - /** Statistics for shift/scale */ - - MeasurementType meanMeasurementVector; - MeasurementType stddevMeasurementVector; - - if (HasValue("instat") && IsParameterEnabled("instat")) - { - StatisticsReader::Pointer statisticsReader = StatisticsReader::New(); - std::string XMLfile = GetParameterString("instat"); - statisticsReader->SetFileName(XMLfile); - meanMeasurementVector = statisticsReader->GetStatisticVectorByName("mean"); - stddevMeasurementVector = statisticsReader->GetStatisticVectorByName("stddev"); - } - else - { - meanMeasurementVector.SetSize(nbFeatures); - meanMeasurementVector.Fill(0.); - stddevMeasurementVector.SetSize(nbFeatures); - stddevMeasurementVector.Fill(1.); - } - - ShiftScaleFilterType::Pointer trainingShiftScaleFilter = ShiftScaleFilterType::New(); - trainingShiftScaleFilter->SetInput(input); - 
trainingShiftScaleFilter->SetShifts(meanMeasurementVector); - trainingShiftScaleFilter->SetScales(stddevMeasurementVector); - trainingShiftScaleFilter->Update(); - otbAppLogINFO("mean used: " << meanMeasurementVector); - otbAppLogINFO("standard deviation used: " << stddevMeasurementVector); - otbAppLogINFO("Loading model"); - - - /** Read the model */ - - m_Model = DimensionalityReductionModelFactoryType::CreateDimensionalityReductionModel(GetParameterString("model"), - DimensionalityReductionModelFactoryType::ReadMode); - if (m_Model.IsNull()) - { - otbAppLogFATAL(<< "Error when loading model " << GetParameterString("model") << " : unsupported model type"); - } - if (HasValue("pcadim") && IsParameterEnabled("pcadim")) - { - int dimension = GetParameterInt("pcadim"); - m_Model->SetDimension(dimension ); - } - - - m_Model->Load(GetParameterString("model")); - otbAppLogINFO("Model loaded"); - - /** Perform Dimensionality Reduction */ - - ListSampleType::Pointer listSample = trainingShiftScaleFilter->GetOutput(); - ListSampleType::Pointer target = m_Model->PredictBatch(listSample); - - /** Create/Update Output Shape file */ - - ogr::DataSource::Pointer output; - ogr::DataSource::Pointer buffer = ogr::DataSource::New(); - bool updateMode = false; - - int nbBands = nbFeatures; - if (HasValue("indim") && IsParameterEnabled("indim")) - {nbBands = GetParameterInt("indim");} - - - if (IsParameterEnabled("out") && HasValue("out")) - { - // Create new OGRDataSource - if (GetParameterString("mode")=="overwrite") - { - output = ogr::DataSource::New(GetParameterString("out"), ogr::DataSource::Modes::Overwrite); - otb::ogr::Layer newLayer = output->CreateLayer(GetParameterString("out"), - const_cast<OGRSpatialReference*>(layer.GetSpatialRef()), - layer.GetGeomType()); - // Copy existing fields - OGRFeatureDefn &inLayerDefn = layer.GetLayerDefn(); - for (int k=0 ; k<inLayerDefn.GetFieldCount()-nbBands ; k++) // we don't copy the original bands - { - OGRFieldDefn fieldDefn(inLayerDefn.GetFieldDefn(k)); - newLayer.CreateField(fieldDefn); - } - } - else if (GetParameterString("mode")=="update") - { - //output = ogr::DataSource::New(GetParameterString("out"), ogr::DataSource::Modes::Update_LayerCreateOnly); - // Update mode - otb::ogr::DataSource::Pointer source_output = otb::ogr::DataSource::New(GetParameterString("out"), otb::ogr::DataSource::Modes::Read); - layer = source_output->GetLayer(0); - updateMode = true; - otbAppLogINFO("Update input vector data."); - - // fill temporary buffer for the transfer - otb::ogr::Layer inputLayer = layer; - layer = buffer->CopyLayer(inputLayer, std::string("Buffer")); - // close input data source - source_output->Clear(); - // Re-open input data source in update mode - output = otb::ogr::DataSource::New(GetParameterString("out"), otb::ogr::DataSource::Modes::Update_LayerUpdate); - - } - else - { - otbAppLogFATAL(<< "Error when creating the output file" << GetParameterString("mode") << " : unsupported writting mode type"); - } - - - } - - - - - otb::ogr::Layer outLayer = output->GetLayer(0); - - OGRErr errStart = outLayer.ogr().StartTransaction(); - - if (errStart != OGRERR_NONE) - { - itkExceptionMacro(<< "Unable to start transaction for OGR layer " << outLayer.ogr().GetName() << "."); - } - - // Add the field of prediction in the output layer if field not exist - - for (unsigned int i=0; i<GetParameterStringList("featout").size() ;i++) - { - OGRFeatureDefn &layerDefn = outLayer.GetLayerDefn(); - int idx = 
layerDefn.GetFieldIndex(GetParameterStringList("featout")[i].c_str()); - - if (idx >= 0) - { - if (layerDefn.GetFieldDefn(idx)->GetType() != OFTReal) - itkExceptionMacro("Field name "<< GetParameterStringList("featout")[i] << " already exists with a different type!"); - } - else - { - OGRFieldDefn predictedField(GetParameterStringList("featout")[i].c_str(), OFTReal); - ogr::FieldDefn predictedFieldDef(predictedField); - outLayer.CreateField(predictedFieldDef); - } - } - - - // Fill output layer - - unsigned int count=0; - auto classfieldname = GetParameterStringList("featout"); - it = layer.cbegin(); - itEnd = layer.cend(); - - for( ; it!=itEnd ; ++it, ++count) - { - ogr::Feature dstFeature(outLayer.GetLayerDefn()); - - dstFeature.SetFrom( *it , TRUE); - dstFeature.SetFID(it->GetFID()); - - - - for (std::size_t i=0; i<classfieldname.size(); ++i){ - dstFeature[classfieldname[i]].SetValue<double>(target->GetMeasurementVector(count)[i]); - } - if (updateMode) - { - outLayer.SetFeature(dstFeature); - } - else - { - outLayer.CreateFeature(dstFeature); - } - } - - if(outLayer.ogr().TestCapability("Transactions")) - { - const OGRErr errCommitX = outLayer.ogr().CommitTransaction(); - if (errCommitX != OGRERR_NONE) - { - itkExceptionMacro(<< "Unable to commit transaction for OGR layer " << outLayer.ogr().GetName() << "."); - } - } - output->SyncToDisk(); - clock_t toc = clock(); - otbAppLogINFO( "Elapsed: "<< ((double)(toc - tic) / CLOCKS_PER_SEC)<<" seconds."); - } - - ModelPointerType m_Model; + { + DimensionalityReductionModelFactoryType::CleanFactories(); + } + +private: + void DoInit() ITK_OVERRIDE + { + SetName("VectorDimensionalityReduction"); + SetDescription("Performs dimensionality reduction of the input vector data " + "according to a model file."); + SetDocName("Vector Dimensionality Reduction"); + SetDocAuthors("OTB-Team"); + SetDocLongDescription("This application performs a vector data " + "dimensionality reduction based on a model file produced by the " + "TrainDimensionalityReduction application."); + SetDocSeeAlso("TrainDimensionalityReduction"); + AddDocTag(Tags::Learning); + + AddParameter(ParameterType_InputVectorData, "in", "Name of the input vector data"); + SetParameterDescription("in","The input vector data to reduce."); + + AddParameter(ParameterType_InputFilename, "instat", "Statistics file"); + SetParameterDescription("instat", "A XML file containing mean and standard " + "deviation to center and reduce samples before dimensionality reduction " + "(produced by ComputeImagesStatistics application)."); + MandatoryOff("instat"); + + AddParameter(ParameterType_InputFilename, "model", "Model file"); + SetParameterDescription("model", "A model file (produced by the " + "TrainDimensionalityReduction application,"); + + AddParameter(ParameterType_ListView, "feat", "Input features to use for reduction."); // + SetParameterDescription("feat","List of field names in the input vector " + "data used as features for reduction."); // + + AddParameter(ParameterType_StringList, "featout", "Names of the new output features."); // + SetParameterDescription("featout","List of field names for the output " + "features which result from the reduction."); // + + AddParameter(ParameterType_OutputFilename, "out", "Output vector data file " + "containing the reduced vector"); + SetParameterDescription("out","Output vector data file storing sample " + "values (OGR format). If not given, the input vector data file is used. 
" + "In overwrite mode, the original features will be lost."); + MandatoryOff("out"); + + AddParameter(ParameterType_Int, "indim", "Dimension of the input vector"); + SetParameterDescription("indim","Dimension of the whole input vector, this " + "value is required if only a part of the bands contained in the vector " + "are used. If not given, the dimension is deduced from the length of the " + "'feat' parameter"); + MandatoryOff("indim"); + + AddParameter(ParameterType_Int, "pcadim", "Principal component"); // + SetParameterDescription("pcadim","This optional parameter can be set to " + "reduce the number of eignevectors used in the PCA model file."); // + MandatoryOff("pcadim"); + + AddParameter(ParameterType_String, "mode", "Writting mode"); // + SetParameterString("mode","overwrite", false); + SetParameterDescription("mode","This parameter determines if the output " + "file is overwritten or updated [overwrite/update]. If an output file " + "name is given, the original file is copied before creating the new features."); // + + // Doc example parameter settings + SetDocExampleParameterValue("in", "vectorData.shp"); + SetDocExampleParameterValue("instat", "meanVar.xml"); + SetDocExampleParameterValue("model", "model.txt"); + SetDocExampleParameterValue("out", "vectorDataOut.shp"); + SetDocExampleParameterValue("feat", "perimeter area width"); + SetDocExampleParameterValue("featout", "perimeter area width"); + //SetOfficialDocLink(); + } + + void DoUpdateParameters() ITK_OVERRIDE + { + if ( HasValue("in") ) + { + std::string shapefile = GetParameterString("in"); + otb::ogr::DataSource::Pointer ogrDS; + OGRSpatialReference oSRS(""); + std::vector<std::string> options; + ogrDS = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read); + otb::ogr::Layer layer = ogrDS->GetLayer(0); + OGRFeatureDefn &layerDefn = layer.GetLayerDefn(); + ClearChoices("feat"); + + for(int iField=0; iField< layerDefn.GetFieldCount(); iField++) + { + std::string item = layerDefn.GetFieldDefn(iField)->GetNameRef(); + std::string key(item); + std::string::iterator end = std::remove_if( key.begin(), key.end(), IsNotAlphaNum ); + std::transform( key.begin(), end, key.begin(), tolower ); + std::string tmpKey = "feat." 
+ key.substr( 0, static_cast<unsigned long>( end - key.begin() ) ); + AddChoice(tmpKey,item); + } + } + } + + void DoExecute() ITK_OVERRIDE + { + clock_t tic = clock(); + + std::string shapefile = GetParameterString("in"); + otb::ogr::DataSource::Pointer source = otb::ogr::DataSource::New( + shapefile, otb::ogr::DataSource::Modes::Read); + otb::ogr::Layer layer = source->GetLayer(0); + ListSampleType::Pointer input = ListSampleType::New(); + int nbFeatures = GetSelectedItems("feat").size(); + + input->SetMeasurementVectorSize(nbFeatures); + otb::ogr::Layer::const_iterator it = layer.cbegin(); + otb::ogr::Layer::const_iterator itEnd = layer.cend(); + + for( ; it!=itEnd ; ++it) + { + MeasurementType mv; + mv.SetSize(nbFeatures); + + for(int idx=0; idx < nbFeatures; ++idx) + { + mv[idx] = static_cast<float>( (*it)[GetSelectedItems("feat")[idx]].GetValue<double>() ); + } + input->PushBack(mv); + } + + /** Statistics for shift/scale */ + MeasurementType meanMeasurementVector; + MeasurementType stddevMeasurementVector; + + if (HasValue("instat") && IsParameterEnabled("instat")) + { + StatisticsReader::Pointer statisticsReader = StatisticsReader::New(); + std::string XMLfile = GetParameterString("instat"); + statisticsReader->SetFileName(XMLfile); + meanMeasurementVector = statisticsReader->GetStatisticVectorByName("mean"); + stddevMeasurementVector = statisticsReader->GetStatisticVectorByName("stddev"); + } + else + { + meanMeasurementVector.SetSize(nbFeatures); + meanMeasurementVector.Fill(0.); + stddevMeasurementVector.SetSize(nbFeatures); + stddevMeasurementVector.Fill(1.); + } + + ShiftScaleFilterType::Pointer trainingShiftScaleFilter = ShiftScaleFilterType::New(); + trainingShiftScaleFilter->SetInput(input); + trainingShiftScaleFilter->SetShifts(meanMeasurementVector); + trainingShiftScaleFilter->SetScales(stddevMeasurementVector); + trainingShiftScaleFilter->Update(); + otbAppLogINFO("mean used: " << meanMeasurementVector); + otbAppLogINFO("standard deviation used: " << stddevMeasurementVector); + otbAppLogINFO("Loading model"); + + /** Read the model */ + m_Model = DimensionalityReductionModelFactoryType::CreateDimensionalityReductionModel( + GetParameterString("model"), + DimensionalityReductionModelFactoryType::ReadMode); + if (m_Model.IsNull()) + { + otbAppLogFATAL(<< "Error when loading model " << GetParameterString("model") + << " : unsupported model type"); + } + if (HasValue("pcadim") && IsParameterEnabled("pcadim")) + { + int dimension = GetParameterInt("pcadim"); + m_Model->SetDimension(dimension ); + } + + m_Model->Load(GetParameterString("model")); + otbAppLogINFO("Model loaded"); + + /** Perform Dimensionality Reduction */ + ListSampleType::Pointer listSample = trainingShiftScaleFilter->GetOutput(); + ListSampleType::Pointer target = m_Model->PredictBatch(listSample); + + /** Create/Update Output Shape file */ + ogr::DataSource::Pointer output; + ogr::DataSource::Pointer buffer = ogr::DataSource::New(); + bool updateMode = false; + + int nbBands = nbFeatures; + if (HasValue("indim") && IsParameterEnabled("indim")) + { + nbBands = GetParameterInt("indim"); + } + + if (IsParameterEnabled("out") && HasValue("out")) + { + // Create new OGRDataSource + if (GetParameterString("mode")=="overwrite") + { + output = ogr::DataSource::New(GetParameterString("out"), ogr::DataSource::Modes::Overwrite); + otb::ogr::Layer newLayer = output->CreateLayer( + GetParameterString("out"), + const_cast<OGRSpatialReference*>(layer.GetSpatialRef()), + layer.GetGeomType()); + // Copy existing fields + 
OGRFeatureDefn &inLayerDefn = layer.GetLayerDefn(); + for (int k=0 ; k<inLayerDefn.GetFieldCount()-nbBands ; k++) // we don't copy the original bands + { + OGRFieldDefn fieldDefn(inLayerDefn.GetFieldDefn(k)); + newLayer.CreateField(fieldDefn); + } + } + else if (GetParameterString("mode")=="update") + { + //output = ogr::DataSource::New(GetParameterString("out"), ogr::DataSource::Modes::Update_LayerCreateOnly); + // Update mode + otb::ogr::DataSource::Pointer source_output = + otb::ogr::DataSource::New(GetParameterString("out"), otb::ogr::DataSource::Modes::Read); + layer = source_output->GetLayer(0); + updateMode = true; + otbAppLogINFO("Update input vector data."); + + // fill temporary buffer for the transfer + otb::ogr::Layer inputLayer = layer; + layer = buffer->CopyLayer(inputLayer, std::string("Buffer")); + // close input data source + source_output->Clear(); + // Re-open input data source in update mode + output = otb::ogr::DataSource::New( + GetParameterString("out"), + otb::ogr::DataSource::Modes::Update_LayerUpdate); + } + else + { + otbAppLogFATAL(<< "Error when creating the output file" << + GetParameterString("mode") << " : unsupported writting mode type"); + } + } + + otb::ogr::Layer outLayer = output->GetLayer(0); + OGRErr errStart = outLayer.ogr().StartTransaction(); + + if (errStart != OGRERR_NONE) + { + itkExceptionMacro(<< "Unable to start transaction for OGR layer " << outLayer.ogr().GetName() << "."); + } + + // Add the field of prediction in the output layer if field not exist + for (unsigned int i=0; i<GetParameterStringList("featout").size() ;i++) + { + OGRFeatureDefn &layerDefn = outLayer.GetLayerDefn(); + int idx = layerDefn.GetFieldIndex(GetParameterStringList("featout")[i].c_str()); + + if (idx >= 0) + { + if (layerDefn.GetFieldDefn(idx)->GetType() != OFTReal) + itkExceptionMacro("Field name "<< GetParameterStringList("featout")[i] + << " already exists with a different type!"); + } + else + { + OGRFieldDefn predictedField(GetParameterStringList("featout")[i].c_str(), OFTReal); + ogr::FieldDefn predictedFieldDef(predictedField); + outLayer.CreateField(predictedFieldDef); + } + } + + // Fill output layer + unsigned int count=0; + auto classfieldname = GetParameterStringList("featout"); + it = layer.cbegin(); + itEnd = layer.cend(); + + for( ; it!=itEnd ; ++it, ++count) + { + ogr::Feature dstFeature(outLayer.GetLayerDefn()); + + dstFeature.SetFrom( *it , TRUE); + dstFeature.SetFID(it->GetFID()); + + for (std::size_t i=0; i<classfieldname.size(); ++i) + { + dstFeature[classfieldname[i]].SetValue<double>(target->GetMeasurementVector(count)[i]); + } + if (updateMode) + { + outLayer.SetFeature(dstFeature); + } + else + { + outLayer.CreateFeature(dstFeature); + } + } + + if(outLayer.ogr().TestCapability("Transactions")) + { + const OGRErr errCommitX = outLayer.ogr().CommitTransaction(); + if (errCommitX != OGRERR_NONE) + { + itkExceptionMacro(<< "Unable to commit transaction for OGR layer " << + outLayer.ogr().GetName() << "."); + } + } + output->SyncToDisk(); + clock_t toc = clock(); + otbAppLogINFO( "Elapsed: "<< ((double)(toc - tic) / CLOCKS_PER_SEC)<<" seconds."); + } + + ModelPointerType m_Model; }; -} -} + +} // end of namespace Wrapper +} // end of namespace otb + OTB_APPLICATION_EXPORT(otb::Wrapper::VectorDimensionalityReduction) diff --git a/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainAutoencoder.txx b/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainAutoencoder.txx index 
3e9107a9a8..efcbd303b9 100644 --- a/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainAutoencoder.txx +++ b/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainAutoencoder.txx @@ -33,28 +33,22 @@ void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> ::InitAutoencoderParams() { - - AddChoice("algorithm.tiedautoencoder", "Shark Tied Autoencoder"); AddChoice("algorithm.autoencoder", "Shark Autoencoder"); SetParameterDescription("algorithm.autoencoder", "This group of parameters allows setting Shark autoencoder parameters. " ); - - + //Tied Autoencoder AddParameter(ParameterType_Choice, "algorithm.autoencoder.istied", "tied weighth <tied/untied>"); SetParameterDescription( "algorithm.autoencoder.istied", "Parameter that determine if the weights are tied or not <tied/untied>"); - - + AddChoice("algorithm.autoencoder.istied.yes","Tied weigths"); AddChoice("algorithm.autoencoder.istied.no","Untied weights"); - - - + //Number Of Iterations AddParameter(ParameterType_Int, "algorithm.autoencoder.nbiter", "Maximum number of iterations during training"); @@ -62,47 +56,43 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> SetParameterDescription( "algorithm.autoencoder.nbiter", "The maximum number of iterations used during training."); - + AddParameter(ParameterType_Int, "algorithm.autoencoder.nbiterfinetuning", "Maximum number of iterations during training"); SetParameterInt("algorithm.autoencoder.nbiterfinetuning",0, false); SetParameterDescription( "algorithm.autoencoder.nbiterfinetuning", "The maximum number of iterations used during fine tuning of the whole network."); - + AddParameter(ParameterType_Float, "algorithm.autoencoder.epsilon", - " "); + "Epsilon"); SetParameterFloat("algorithm.autoencoder.epsilon",0, false); SetParameterDescription( "algorithm.autoencoder.epsilon", - " "); - - + "Epsilon"); + AddParameter(ParameterType_Float, "algorithm.autoencoder.initfactor", - " "); + "Weight initialization factor"); SetParameterFloat("algorithm.autoencoder.initfactor",1, false); SetParameterDescription( - "algorithm.autoencoder.initfactor", "parameter that control the weight initialization of the autoencoder"); - + "algorithm.autoencoder.initfactor", "Parameter that control the weight initialization of the autoencoder"); + //Number Of Hidden Neurons - AddParameter(ParameterType_StringList , "algorithm.autoencoder.nbneuron", "Size"); - /*AddParameter(ParameterType_Int, "algorithm.autoencoder.nbneuron", - "Number of neurons in the hidden layer"); - SetParameterInt("algorithm.autoencoder.nbneuron",10, false);*/ + AddParameter(ParameterType_StringList, "algorithm.autoencoder.nbneuron", "Size"); SetParameterDescription( "algorithm.autoencoder.nbneuron", "The number of neurons in each hidden layer."); - + //Regularization AddParameter(ParameterType_StringList, "algorithm.autoencoder.regularization", "Strength of the regularization"); SetParameterDescription("algorithm.autoencoder.regularization", "Strength of the L2 regularization used during training"); - + //Noise strength AddParameter(ParameterType_StringList, "algorithm.autoencoder.noise", "Strength of the noise"); SetParameterDescription("algorithm.autoencoder.noise", "Strength of the noise"); - + // Sparsity parameter AddParameter(ParameterType_StringList, "algorithm.autoencoder.rho", "Sparsity parameter"); SetParameterDescription("algorithm.autoencoder.rho", @@ -116,97 +106,80 @@ 
TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> AddParameter(ParameterType_OutputFilename, "algorithm.autoencoder.learningcurve", "Learning curve"); SetParameterDescription("algorithm.autoencoder.learningcurve", "Learning error values"); MandatoryOff("algorithm.autoencoder.learningcurve"); - } - template <class TInputValue, class TOutputValue> void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> ::BeforeTrainAutoencoder(typename ListSampleType::Pointer trainingListSample, std::string modelPath) { - // typedef shark::Autoencoder< shark::TanhNeuron, shark::LinearNeuron> AutoencoderType; typedef shark::LogisticNeuron NeuronType; typedef otb::AutoencoderModel<InputValueType, NeuronType> AutoencoderModelType; - /* - // typedef shark::TiedAutoencoder< shark::TanhNeuron, shark::LinearNeuron> TiedAutoencoderType; - typedef shark::TiedAutoencoder< shark::TanhNeuron, shark::TanhNeuron> TiedAutoencoderType; - typedef otb::AutoencoderModel<InputValueType, TiedAutoencoderType> TiedAutoencoderModelType; - */ std::string TiedWeigth = GetParameterString("algorithm.autoencoder.istied"); std::cout << TiedWeigth << std::endl; - + if(TiedWeigth == "no") - { - TrainAutoencoder<AutoencoderModelType>(trainingListSample,modelPath); - } - /* - if(TiedWeigth == "yes") - { - TrainAutoencoder<TiedAutoencoderModelType>(trainingListSample,modelPath); - } - */ - if(TiedWeigth != "yes" && TiedWeigth != "no") - { - std::cerr << "istied : invalid choice <yes/no>" << std::endl; - } + { + TrainAutoencoder<AutoencoderModelType>(trainingListSample,modelPath); + } + if(TiedWeigth != "yes" && TiedWeigth != "no") + { + std::cerr << "istied : invalid choice <yes/no>" << std::endl; + } } - - template <class TInputValue, class TOutputValue> template <typename autoencoderchoice> void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue>::TrainAutoencoder(typename ListSampleType::Pointer trainingListSample,std::string modelPath) { - typename autoencoderchoice::Pointer dimredTrainer = autoencoderchoice::New(); - itk::Array<unsigned int> nb_neuron; - itk::Array<float> noise; - itk::Array<float> regularization; - itk::Array<float> rho; - itk::Array<float> beta; - std::vector<std::basic_string<char>> s_nbneuron= GetParameterStringList("algorithm.autoencoder.nbneuron"); - std::vector<std::basic_string<char>> s_noise= GetParameterStringList("algorithm.autoencoder.noise"); - std::vector<std::basic_string<char>> s_regularization= GetParameterStringList("algorithm.autoencoder.regularization"); - std::vector<std::basic_string<char>> s_rho= GetParameterStringList("algorithm.autoencoder.rho"); - std::vector<std::basic_string<char>> s_beta= GetParameterStringList("algorithm.autoencoder.beta"); - nb_neuron.SetSize(s_nbneuron.size()); - noise.SetSize(s_nbneuron.size()); - regularization.SetSize(s_nbneuron.size()); - rho.SetSize(s_nbneuron.size()); - beta.SetSize(s_nbneuron.size()); - for (unsigned int i=0; i<s_nbneuron.size(); i++){ - nb_neuron[i]=std::stoi(s_nbneuron[i]); - noise[i]=std::stof(s_noise[i]); - regularization[i]=std::stof(s_regularization[i]); - rho[i]=std::stof(s_rho[i]); - beta[i]=std::stof(s_beta[i]); - } - dimredTrainer->SetNumberOfHiddenNeurons(nb_neuron); - dimredTrainer->SetNumberOfIterations(GetParameterInt("algorithm.autoencoder.nbiter")); - dimredTrainer->SetNumberOfIterationsFineTuning(GetParameterInt("algorithm.autoencoder.nbiterfinetuning")); - dimredTrainer->SetEpsilon(GetParameterFloat("algorithm.autoencoder.epsilon")); - 
dimredTrainer->SetInitFactor(GetParameterFloat("algorithm.autoencoder.initfactor")); - dimredTrainer->SetRegularization(regularization); - dimredTrainer->SetNoise(noise); - dimredTrainer->SetRho(rho); - dimredTrainer->SetBeta(beta); - - dimredTrainer->SetWriteWeights(true); - if (HasValue("algorithm.autoencoder.learningcurve") && IsParameterEnabled("algorithm.autoencoder.learningcurve")) - { - std::cout << "yo" << std::endl; - dimredTrainer->SetWriteLearningCurve(true); - dimredTrainer->SetLearningCurveFileName(GetParameterString("algorithm.autoencoder.learningcurve")); - } - - dimredTrainer->SetInputListSample(trainingListSample); - std::cout << "before train" << std::endl; - dimredTrainer->Train(); - std::cout << "after train" << std::endl; - dimredTrainer->Save(modelPath); - + typename autoencoderchoice::Pointer dimredTrainer = autoencoderchoice::New(); + itk::Array<unsigned int> nb_neuron; + itk::Array<float> noise; + itk::Array<float> regularization; + itk::Array<float> rho; + itk::Array<float> beta; + std::vector<std::basic_string<char>> s_nbneuron= GetParameterStringList("algorithm.autoencoder.nbneuron"); + std::vector<std::basic_string<char>> s_noise= GetParameterStringList("algorithm.autoencoder.noise"); + std::vector<std::basic_string<char>> s_regularization= GetParameterStringList("algorithm.autoencoder.regularization"); + std::vector<std::basic_string<char>> s_rho= GetParameterStringList("algorithm.autoencoder.rho"); + std::vector<std::basic_string<char>> s_beta= GetParameterStringList("algorithm.autoencoder.beta"); + nb_neuron.SetSize(s_nbneuron.size()); + noise.SetSize(s_nbneuron.size()); + regularization.SetSize(s_nbneuron.size()); + rho.SetSize(s_nbneuron.size()); + beta.SetSize(s_nbneuron.size()); + for (unsigned int i=0; i<s_nbneuron.size(); i++) + { + nb_neuron[i]=std::stoi(s_nbneuron[i]); + noise[i]=std::stof(s_noise[i]); + regularization[i]=std::stof(s_regularization[i]); + rho[i]=std::stof(s_rho[i]); + beta[i]=std::stof(s_beta[i]); + } + dimredTrainer->SetNumberOfHiddenNeurons(nb_neuron); + dimredTrainer->SetNumberOfIterations(GetParameterInt("algorithm.autoencoder.nbiter")); + dimredTrainer->SetNumberOfIterationsFineTuning(GetParameterInt("algorithm.autoencoder.nbiterfinetuning")); + dimredTrainer->SetEpsilon(GetParameterFloat("algorithm.autoencoder.epsilon")); + dimredTrainer->SetInitFactor(GetParameterFloat("algorithm.autoencoder.initfactor")); + dimredTrainer->SetRegularization(regularization); + dimredTrainer->SetNoise(noise); + dimredTrainer->SetRho(rho); + dimredTrainer->SetBeta(beta); + dimredTrainer->SetWriteWeights(true); + if (HasValue("algorithm.autoencoder.learningcurve") && + IsParameterEnabled("algorithm.autoencoder.learningcurve")) + { + std::cout << "yo" << std::endl; + dimredTrainer->SetWriteLearningCurve(true); + dimredTrainer->SetLearningCurveFileName(GetParameterString("algorithm.autoencoder.learningcurve")); + } + dimredTrainer->SetInputListSample(trainingListSample); + std::cout << "before train" << std::endl; + dimredTrainer->Train(); + std::cout << "after train" << std::endl; + dimredTrainer->Save(modelPath); } } //end namespace wrapper diff --git a/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainPCA.txx b/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainPCA.txx index b04bd41eae..03016916cb 100644 --- a/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainPCA.txx +++ 
b/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainPCA.txx @@ -33,14 +33,11 @@ void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> ::InitPCAParams() { - - AddChoice("algorithm.pca", "Shark PCA"); SetParameterDescription("algorithm.pca", "This group of parameters allows setting Shark PCA parameters. " ); - - + //Output Dimension AddParameter(ParameterType_Int, "algorithm.pca.dim", "Dimension of the output of the pca transformation"); @@ -48,8 +45,6 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> SetParameterDescription( "algorithm.pca.dim", "Dimension of the output of the pca transformation."); - - } template <class TInputValue, class TOutputValue> @@ -57,12 +52,12 @@ void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> ::TrainPCA(typename ListSampleType::Pointer trainingListSample,std::string modelPath) { typedef otb::PCAModel<InputValueType> PCAModelType; - typename PCAModelType::Pointer dimredTrainer = PCAModelType::New(); - dimredTrainer->SetDimension(GetParameterInt("algorithm.pca.dim")); - dimredTrainer->SetInputListSample(trainingListSample); - dimredTrainer->SetWriteEigenvectors(true); - dimredTrainer->Train(); - dimredTrainer->Save(modelPath); + typename PCAModelType::Pointer dimredTrainer = PCAModelType::New(); + dimredTrainer->SetDimension(GetParameterInt("algorithm.pca.dim")); + dimredTrainer->SetInputListSample(trainingListSample); + dimredTrainer->SetWriteEigenvectors(true); + dimredTrainer->Train(); + dimredTrainer->Save(modelPath); } } //end namespace wrapper diff --git a/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainSOM.txx b/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainSOM.txx index 2cfeb1ad30..535c839e73 100644 --- a/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainSOM.txx +++ b/Modules/Applications/AppDimensionalityReduction/include/otbDimensionalityReductionTrainSOM.txx @@ -32,20 +32,19 @@ void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> ::InitSOMParams() { - AddChoice("algorithm.som", "OTB SOM"); SetParameterDescription("algorithm.som", "This group of parameters allows setting SOM parameters. 
" ); - AddParameter(ParameterType_Int, "algorithm.som.dim","Dimension of the map"); - SetParameterDescription("algorithm.som.dim","Dimension of the SOM map."); + AddParameter(ParameterType_Int, "algorithm.som.dim","Dimension of the map"); + SetParameterDescription("algorithm.som.dim","Dimension of the SOM map."); - AddParameter(ParameterType_StringList , "algorithm.som.s", "Size"); + AddParameter(ParameterType_StringList , "algorithm.som.s", "Size"); SetParameterDescription("algorithm.som.s", "Size of the SOM map"); MandatoryOff("algorithm.som.s"); - AddParameter(ParameterType_StringList , "algorithm.som.n", "Size Neighborhood"); + AddParameter(ParameterType_StringList , "algorithm.som.n", "Size Neighborhood"); SetParameterDescription("algorithm.som.n", "Size of the initial neighborhood in the SOM map"); MandatoryOff("algorithm.som.n"); @@ -99,74 +98,69 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> ::BeforeTrainSOM(typename ListSampleType::Pointer trainingListSample, std::string modelPath) { - //typedef SOMMap<TInputValue,itk::Statistics::EuclideanDistanceMetric<itk::VariableLengthVector<TInputValue>>, 2> Map2DType; - //typedef SOMMap<TInputValue,itk::Statistics::EuclideanDistanceMetric<itk::VariableLengthVector<TInputValue>>, 3> Map3DType; - //typedef SOMMap<TInputValue,itk::Statistics::EuclideanDistanceMetric<itk::VariableLengthVector<TInputValue>>, 4> Map4DType; - //typedef SOMMap<TInputValue,itk::Statistics::EuclideanDistanceMetric<itk::VariableLengthVector<TInputValue>>, 5> Map5DType; - typedef otb::SOMModel<InputValueType, 2> SOM2DModelType; - typedef otb::SOMModel<InputValueType, 3> SOM3DModelType; - typedef otb::SOMModel<InputValueType, 4> SOM4DModelType; - typedef otb::SOMModel<InputValueType, 5> SOM5DModelType; - int SomDim = GetParameterInt("algorithm.som.dim"); std::cout << SomDim << std::endl; - + if(SomDim == 2) - { - TrainSOM<SOM2DModelType >(trainingListSample,modelPath); - } - - if(SomDim == 3) - { - TrainSOM<SOM3DModelType >(trainingListSample,modelPath); - } - - if(SomDim == 4) - { - TrainSOM<SOM4DModelType >(trainingListSample,modelPath); - } - - if(SomDim == 5) - { - TrainSOM<SOM5DModelType >(trainingListSample,modelPath); - } - if(SomDim > 5 || SomDim < 2) - { - std::cerr << "k : invalid dimension" << std::endl; - } + { + typedef otb::SOMModel<InputValueType, 2> SOM2DModelType; + TrainSOM<SOM2DModelType >(trainingListSample,modelPath); + } + + if(SomDim == 3) + { + typedef otb::SOMModel<InputValueType, 3> SOM3DModelType; + TrainSOM<SOM3DModelType >(trainingListSample,modelPath); + } + + if(SomDim == 4) + { + typedef otb::SOMModel<InputValueType, 4> SOM4DModelType; + TrainSOM<SOM4DModelType >(trainingListSample,modelPath); + } + + if(SomDim == 5) + { + typedef otb::SOMModel<InputValueType, 5> SOM5DModelType; + TrainSOM<SOM5DModelType >(trainingListSample,modelPath); + } + if(SomDim > 5 || SomDim < 2) + { + std::cerr << "k : invalid dimension" << std::endl; + } } - template <class TInputValue, class TOutputValue> -template <typename somchoice> +template <typename TSOM> void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> ::TrainSOM(typename ListSampleType::Pointer trainingListSample,std::string modelPath) { - using TemplateEstimatorType = typename somchoice::EstimatorType; - typename somchoice::Pointer dimredTrainer = somchoice::New(); - unsigned int dim = dimredTrainer->GetDimension(); - std::cout << dim << std::endl; - dimredTrainer->SetNumberOfIterations(GetParameterInt("algorithm.som.ni")); - 
dimredTrainer->SetBetaInit(GetParameterFloat("algorithm.som.bi")); - dimredTrainer->SetWriteMap(true); - dimredTrainer->SetBetaEnd(GetParameterFloat("algorithm.som.bf")); - dimredTrainer->SetMaxWeight(GetParameterFloat("algorithm.som.iv")); - typename TemplateEstimatorType::SizeType size; - std::vector<std::basic_string<char>> s= GetParameterStringList("algorithm.som.s"); - for (unsigned int i=0; i<dim; i++){ - size[i]=std::stoi(s[i]); - } - - dimredTrainer->SetMapSize(size); - typename TemplateEstimatorType::SizeType radius; - std::vector<std::basic_string<char>> n= GetParameterStringList("algorithm.som.n"); - for (unsigned int i=0; i<dim; i++){ - radius[i]=std::stoi(n[i]); - } - dimredTrainer->SetNeighborhoodSizeInit(radius); - dimredTrainer->SetListSample(trainingListSample); - dimredTrainer->Train(); - dimredTrainer->Save(modelPath); + typename TSOM::Pointer dimredTrainer = TSOM::New(); + unsigned int dim = dimredTrainer->GetDimension(); + std::cout << dim << std::endl; + dimredTrainer->SetNumberOfIterations(GetParameterInt("algorithm.som.ni")); + dimredTrainer->SetBetaInit(GetParameterFloat("algorithm.som.bi")); + dimredTrainer->SetWriteMap(true); + dimredTrainer->SetBetaEnd(GetParameterFloat("algorithm.som.bf")); + dimredTrainer->SetMaxWeight(GetParameterFloat("algorithm.som.iv")); + typename TSOM::SizeType size; + std::vector<std::basic_string<char>> s= GetParameterStringList("algorithm.som.s"); + for (unsigned int i=0; i<dim; i++) + { + size[i]=std::stoi(s[i]); + } + + dimredTrainer->SetMapSize(size); + typename TSOM::SizeType radius; + std::vector<std::basic_string<char>> n= GetParameterStringList("algorithm.som.n"); + for (unsigned int i=0; i<dim; i++) + { + radius[i]=std::stoi(n[i]); + } + dimredTrainer->SetNeighborhoodSizeInit(radius); + dimredTrainer->SetListSample(trainingListSample); + dimredTrainer->Train(); + dimredTrainer->Save(modelPath); } } //end namespace wrapper diff --git a/Modules/Applications/AppDimensionalityReduction/include/otbTrainDimensionalityReductionApplicationBase.h b/Modules/Applications/AppDimensionalityReduction/include/otbTrainDimensionalityReductionApplicationBase.h index 85c12a4fb8..3fca979c8a 100644 --- a/Modules/Applications/AppDimensionalityReduction/include/otbTrainDimensionalityReductionApplicationBase.h +++ b/Modules/Applications/AppDimensionalityReduction/include/otbTrainDimensionalityReductionApplicationBase.h @@ -21,17 +21,14 @@ #define otbTrainDimensionalityReductionApplicationBase_h #include "otbConfigure.h" - #include "otbWrapperApplication.h" - -#include <iostream> +#include "otbDimensionalityReductionModelFactory.h" // ListSample #include "itkListSample.h" #include "itkVariableLengthVector.h" -//Estimator -#include "otbDimensionalityReductionModelFactory.h" +#include <iostream> namespace otb { @@ -98,30 +95,29 @@ public: typedef typename ModelType::InputListSampleType ListSampleType; protected: -TrainDimensionalityReductionApplicationBase(); -~TrainDimensionalityReductionApplicationBase(); + TrainDimensionalityReductionApplicationBase(); + ~TrainDimensionalityReductionApplicationBase() override; -/** Generic method to train and save the machine learning model. This method - * uses specific train methods depending on the chosen model.*/ -void Train(typename ListSampleType::Pointer trainingListSample, - std::string modelPath); + /** Generic method to train and save the machine learning model. 
This method + * uses specific train methods depending on the chosen model.*/ + void Train(typename ListSampleType::Pointer trainingListSample, + std::string modelPath); -/** Generic method to load a model file and use it to classify a sample list*/ -void Reduce(typename ListSampleType::Pointer validationListSample, - std::string modelPath); + /** Generic method to load a model file and use it to classify a sample list*/ + void Reduce(typename ListSampleType::Pointer validationListSample, + std::string modelPath); -/** Init method that creates all the parameters for machine learning models */ -void DoInit(); + /** Init method that creates all the parameters for machine learning models */ + void DoInit() override; private: -/** Specific Init and Train methods for each machine learning model */ -//@{ - -void InitSOMParams(); -template <class somchoice> -void TrainSOM(typename ListSampleType::Pointer trainingListSample, std::string modelPath); -void BeforeTrainSOM(typename ListSampleType::Pointer trainingListSample, std::string modelPath); + /** Specific Init and Train methods for each machine learning model */ + + void InitSOMParams(); + template <class somchoice> + void TrainSOM(typename ListSampleType::Pointer trainingListSample, std::string modelPath); + void BeforeTrainSOM(typename ListSampleType::Pointer trainingListSample, std::string modelPath); #ifdef OTB_USE_SHARK void InitAutoencoderParams(); @@ -132,12 +128,11 @@ void BeforeTrainSOM(typename ListSampleType::Pointer trainingListSample, std::st void TrainAutoencoder(typename ListSampleType::Pointer trainingListSample, std::string modelPath); void TrainPCA(typename ListSampleType::Pointer trainingListSample, std::string modelPath); -#endif -//@} +#endif }; -} -} +} // end of namespace Wrapper +} // end of namespace otb #ifndef OTB_MANUAL_INSTANTIATION #include "otbTrainDimensionalityReductionApplicationBase.txx" diff --git a/Modules/Applications/AppDimensionalityReduction/include/otbTrainDimensionalityReductionApplicationBase.txx b/Modules/Applications/AppDimensionalityReduction/include/otbTrainDimensionalityReductionApplicationBase.txx index d99b51caae..56a3364915 100644 --- a/Modules/Applications/AppDimensionalityReduction/include/otbTrainDimensionalityReductionApplicationBase.txx +++ b/Modules/Applications/AppDimensionalityReduction/include/otbTrainDimensionalityReductionApplicationBase.txx @@ -49,8 +49,8 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> // main choice parameter that will contain all dimensionality reduction options AddParameter(ParameterType_Choice, "algorithm", "algorithm to use for the training"); - SetParameterDescription("algorithm", "Choice of the dimensionality reduction algorithm to use for the training."); - + SetParameterDescription("algorithm", "Choice of the dimensionality reduction " + "algorithm to use for the training."); InitSOMParams(); @@ -71,48 +71,39 @@ TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> template <class TInputValue, class TOutputValue> void TrainDimensionalityReductionApplicationBase<TInputValue,TOutputValue> -::Train(typename ListSampleType::Pointer trainingListSample, - std::string modelPath) +::Train( + typename ListSampleType::Pointer trainingListSample, + std::string modelPath) { - - // get the name of the chosen machine learning model + // get the name of the chosen machine learning model const std::string modelName = GetParameterString("algorithm"); // call specific train function - + if(modelName == "som") - { - 
BeforeTrainSOM(trainingListSample,modelPath); - } - + { + BeforeTrainSOM(trainingListSample,modelPath); + } + if(modelName == "autoencoder") { - #ifdef OTB_USE_SHARK +#ifdef OTB_USE_SHARK BeforeTrainAutoencoder(trainingListSample,modelPath); #else otbAppLogFATAL("Module SharkLearning is not installed. You should consider turning OTB_USE_SHARK on during cmake configuration."); #endif } - /* - if(modelName == "tiedautoencoder") - { - #ifdef OTB_USE_SHARK - TrainAutoencoder<TiedAutoencoderModelType>(trainingListSample,modelPath); - #else - otbAppLogFATAL("Module SharkLearning is not installed. You should consider turning OTB_USE_SHARK on during cmake configuration."); - #endif - } - */ + if(modelName == "pca") { - #ifdef OTB_USE_SHARK - TrainPCA(trainingListSample,modelPath); - #else - otbAppLogFATAL("Module SharkLearning is not installed. You should consider turning OTB_USE_SHARK on during cmake configuration."); - #endif +#ifdef OTB_USE_SHARK + TrainPCA(trainingListSample,modelPath); +#else + otbAppLogFATAL("Module SharkLearning is not installed. You should consider turning OTB_USE_SHARK on during cmake configuration."); +#endif } } -} -} +} // end of namespace Wrapper +} // end of namespace otb #endif diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.h b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.h index f9cc914270..0285f9d852 100644 --- a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.h +++ b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.h @@ -37,68 +37,75 @@ namespace otb { + +/** \class PCAModel + * + * This class wraps a PCA model implemented by Shark, in a otb::MachineLearningModel + */ template <class TInputValue> -class ITK_EXPORT PCAModel: public MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>> +class ITK_EXPORT PCAModel + : public MachineLearningModel< + itk::VariableLengthVector< TInputValue >, + itk::VariableLengthVector< TInputValue > > { - public: - - typedef PCAModel Self; - typedef MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>> Superclass; - typedef itk::SmartPointer<Self> Pointer; - typedef itk::SmartPointer<const Self> ConstPointer; - - typedef typename Superclass::InputValueType InputValueType; - typedef typename Superclass::InputSampleType InputSampleType; - typedef typename Superclass::InputListSampleType InputListSampleType; - typedef typename InputListSampleType::Pointer ListSamplePointerType; - typedef typename Superclass::TargetValueType TargetValueType; - typedef typename Superclass::TargetSampleType TargetSampleType; - typedef typename Superclass::TargetListSampleType TargetListSampleType; - - /// Confidence map related typedefs - - typedef typename Superclass::ConfidenceValueType ConfidenceValueType; - typedef typename Superclass::ConfidenceSampleType ConfidenceSampleType; - typedef typename Superclass::ConfidenceListSampleType ConfidenceListSampleType; - - - itkNewMacro(Self); - itkTypeMacro(PCAModel, DimensionalityReductionModel); -/* - unsigned int GetDimension() {return m_Dimension;}; - itkSetMacro(Dimension,unsigned int); - */ - itkSetMacro(Do_resize_flag,bool); - - itkSetMacro(WriteEigenvectors, bool); - itkGetMacro(WriteEigenvectors, bool); - - bool CanReadFile(const std::string & filename); - bool CanWriteFile(const std::string & filename); - - void Save(const std::string & filename, const std::string & name="") ITK_OVERRIDE; - void Load(const 
std::string & filename, const std::string & name="") ITK_OVERRIDE; - - void Train() ITK_OVERRIDE; - //void Dimensionality_reduction() {}; // Dimensionality reduction is done by DoPredict - + typedef PCAModel Self; + typedef MachineLearningModel< + itk::VariableLengthVector< TInputValue >, + itk::VariableLengthVector< TInputValue> > Superclass; + typedef itk::SmartPointer<Self> Pointer; + typedef itk::SmartPointer<const Self> ConstPointer; + + typedef typename Superclass::InputValueType InputValueType; + typedef typename Superclass::InputSampleType InputSampleType; + typedef typename Superclass::InputListSampleType InputListSampleType; + typedef typename InputListSampleType::Pointer ListSamplePointerType; + typedef typename Superclass::TargetValueType TargetValueType; + typedef typename Superclass::TargetSampleType TargetSampleType; + typedef typename Superclass::TargetListSampleType TargetListSampleType; + + // Confidence map related typedefs + typedef typename Superclass::ConfidenceValueType ConfidenceValueType; + typedef typename Superclass::ConfidenceSampleType ConfidenceSampleType; + typedef typename Superclass::ConfidenceListSampleType ConfidenceListSampleType; + + itkNewMacro(Self); + itkTypeMacro(PCAModel, DimensionalityReductionModel); + + itkSetMacro(DoResizeFlag,bool); + + itkSetMacro(WriteEigenvectors, bool); + itkGetMacro(WriteEigenvectors, bool); + + bool CanReadFile(const std::string & filename); + bool CanWriteFile(const std::string & filename); + + void Save(const std::string & filename, const std::string & name="") ITK_OVERRIDE; + void Load(const std::string & filename, const std::string & name="") ITK_OVERRIDE; + + void Train() ITK_OVERRIDE; protected: - PCAModel(); - ~PCAModel() ITK_OVERRIDE; + PCAModel(); + ~PCAModel() ITK_OVERRIDE; - virtual TargetSampleType DoPredict(const InputSampleType& input, ConfidenceValueType * quality = ITK_NULLPTR) const; + virtual TargetSampleType DoPredict( + const InputSampleType& input, + ConfidenceValueType * quality = ITK_NULLPTR) const; + + virtual void DoPredictBatch( + const InputListSampleType *, + const unsigned int & startIndex, + const unsigned int & size, + TargetListSampleType *, + ConfidenceListSampleType * quality = ITK_NULLPTR) const ITK_OVERRIDE; - virtual void DoPredictBatch(const InputListSampleType *, const unsigned int & startIndex, const unsigned int & size, TargetListSampleType *, ConfidenceListSampleType * quality = ITK_NULLPTR) const ITK_OVERRIDE; - private: - shark::LinearModel<> m_encoder; - shark::LinearModel<> m_decoder; - shark::PCA m_pca; - //unsigned int m_Dimension; - bool m_Do_resize_flag; - bool m_WriteEigenvectors; + shark::LinearModel<> m_Encoder; + shark::LinearModel<> m_Decoder; + shark::PCA m_PCA; + bool m_DoResizeFlag; + bool m_WriteEigenvectors; }; } // end namespace otb diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.txx b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.txx index 063b54f4f4..376b107da1 100644 --- a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.txx +++ b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModel.txx @@ -36,7 +36,6 @@ #include <shark/Algorithms/GradientDescent/Rprop.h>// the RProp optimization algorithm #include <shark/ObjectiveFunctions/Loss/SquaredLoss.h> // squared loss used for regression #include <shark/ObjectiveFunctions/Regularizer.h> //L2 regulariziation - #include <shark/ObjectiveFunctions/ErrorFunction.h> #if defined(__GNUC__) || defined(__clang__) #pragma GCC diagnostic 
pop @@ -45,125 +44,116 @@ namespace otb { - template <class TInputValue> PCAModel<TInputValue>::PCAModel() { - this->m_IsDoPredictBatchMultiThreaded = true; - this->m_Dimension = 0; + this->m_IsDoPredictBatchMultiThreaded = true; + this->m_Dimension = 0; } - template <class TInputValue> PCAModel<TInputValue>::~PCAModel() { } - template <class TInputValue> -void PCAModel<TInputValue>::Train() +void +PCAModel<TInputValue>::Train() { - - std::vector<shark::RealVector> features; - - Shark::ListSampleToSharkVector(this->GetInputListSample(), features); - - shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features ); - m_pca.setData(inputSamples); - m_pca.encoder(m_encoder, this->m_Dimension); - m_pca.decoder(m_decoder, this->m_Dimension); - -} + std::vector<shark::RealVector> features; + Shark::ListSampleToSharkVector(this->GetInputListSample(), features); + + shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features ); + m_PCA.setData(inputSamples); + m_PCA.encoder(m_Encoder, this->m_Dimension); + m_PCA.decoder(m_Decoder, this->m_Dimension); +} template <class TInputValue> -bool PCAModel<TInputValue>::CanReadFile(const std::string & filename) +bool +PCAModel<TInputValue>::CanReadFile(const std::string & filename) { - try - { - this->Load(filename); - m_encoder.name(); - } - catch(...) - { - return false; - } - return true; + try + { + this->Load(filename); + m_Encoder.name(); + } + catch(...) + { + return false; + } + return true; } - template <class TInputValue> bool PCAModel<TInputValue>::CanWriteFile(const std::string & /*filename*/) { - return true; + return true; } template <class TInputValue> -void PCAModel<TInputValue>::Save(const std::string & filename, const std::string & /*name*/) +void +PCAModel<TInputValue>::Save(const std::string & filename, const std::string & /*name*/) { - std::ofstream ofs(filename); - //ofs << m_encoder.name() << std::endl; //first line - ofs << "pca" << std::endl; //first line - boost::archive::polymorphic_text_oarchive oa(ofs); - m_encoder.write(oa); - ofs.close(); - - if (this->m_WriteEigenvectors == true) // output the map vectors in a txt file - { - std::ofstream otxt(filename+".txt"); - - otxt << "Eigenvectors : " << m_pca.eigenvectors() << std::endl; - otxt << "Eigenvalues : " << m_pca.eigenvalues() << std::endl; - - std::vector<shark::RealVector> features; - - shark::SquaredLoss<shark::RealVector> loss; - Shark::ListSampleToSharkVector(this->GetInputListSample(), features); - shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features ); - otxt << "Reconstruction error : " << loss.eval(inputSamples,m_decoder(m_encoder(inputSamples))) << std::endl; -otxt.close(); -} + std::ofstream ofs(filename); + ofs << "pca" << std::endl; //first line + boost::archive::polymorphic_text_oarchive oa(ofs); + m_Encoder.write(oa); + ofs.close(); + + if (this->m_WriteEigenvectors == true) // output the map vectors in a txt file + { + std::ofstream otxt(filename+".txt"); + + otxt << "Eigenvectors : " << m_PCA.eigenvectors() << std::endl; + otxt << "Eigenvalues : " << m_PCA.eigenvalues() << std::endl; + + std::vector<shark::RealVector> features; + + shark::SquaredLoss<shark::RealVector> loss; + Shark::ListSampleToSharkVector(this->GetInputListSample(), features); + shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features ); + otxt << "Reconstruction error : " << + loss.eval(inputSamples,m_Decoder(m_Encoder(inputSamples))) << std::endl; + otxt.close(); + } } template <class 
TInputValue> -void PCAModel<TInputValue>::Load(const std::string & filename, const std::string & /*name*/) +void +PCAModel<TInputValue>::Load(const std::string & filename, const std::string & /*name*/) { - std::ifstream ifs(filename); - char encoder[256]; - ifs.getline(encoder,256); - std::string encoderstr(encoder); - - //if (encoderstr != m_encoder.name()){ - if (encoderstr != "pca"){ - itkExceptionMacro(<< "Error opening " << filename.c_str() ); + std::ifstream ifs(filename); + char encoder[256]; + ifs.getline(encoder,256); + std::string encoderstr(encoder); + + if (encoderstr != "pca"){ + itkExceptionMacro(<< "Error opening " << filename.c_str() ); } - boost::archive::polymorphic_text_iarchive ia(ifs); - m_encoder.read(ia); - ifs.close(); - if (this->m_Dimension ==0) - { - this->m_Dimension = m_encoder.outputSize(); - } - - - auto eigenvectors = m_encoder.matrix(); - eigenvectors.resize(this->m_Dimension,m_encoder.inputSize()); - - m_encoder.setStructure(eigenvectors, m_encoder.offset() ); - - - + boost::archive::polymorphic_text_iarchive ia(ifs); + m_Encoder.read(ia); + ifs.close(); + if (this->m_Dimension ==0) + { + this->m_Dimension = m_Encoder.outputSize(); + } + + auto eigenvectors = m_Encoder.matrix(); + eigenvectors.resize(this->m_Dimension,m_Encoder.inputSize()); + + m_Encoder.setStructure(eigenvectors, m_Encoder.offset() ); } - template <class TInputValue> typename PCAModel<TInputValue>::TargetSampleType PCAModel<TInputValue>::DoPredict(const InputSampleType & value, ConfidenceValueType * /*quality*/) const { - shark::RealVector samples(value.Size()); - for(size_t i = 0; i < value.Size();i++) + shark::RealVector samples(value.Size()); + for(size_t i = 0; i < value.Size();i++) { - samples[i]=value[i]; + samples[i]=value[i]; } std::vector<shark::RealVector> features; @@ -171,44 +161,37 @@ PCAModel<TInputValue>::DoPredict(const InputSampleType & value, ConfidenceValueT shark::Data<shark::RealVector> data = shark::createDataFromRange(features); - data = m_encoder(data); + data = m_Encoder(data); TargetSampleType target; target.SetSize(this->m_Dimension); - - for(unsigned int a = 0; a < this->m_Dimension; ++a){ - target[a]=data.element(0)[a]; - } - return target; + + for(unsigned int a = 0; a < this->m_Dimension; ++a){ + target[a]=data.element(0)[a]; + } + return target; } - template <class TInputValue> void PCAModel<TInputValue> ::DoPredictBatch(const InputListSampleType *input, const unsigned int & startIndex, const unsigned int & size, TargetListSampleType * targets, ConfidenceListSampleType * /*quality*/) const { - - std::vector<shark::RealVector> features; - Shark::ListSampleRangeToSharkVector(input, features,startIndex,size); - shark::Data<shark::RealVector> data = shark::createDataFromRange(features); - TargetSampleType target; - data = m_encoder(data); - unsigned int id = startIndex; - target.SetSize(this->m_Dimension); - for(const auto& p : data.elements()){ - - for(unsigned int a = 0; a < this->m_Dimension; ++a){ - target[a]=p[a]; - //target[a]=1; - - //target.SetElement(a,p[a]); - } -targets->SetMeasurementVector(id,target); -++id; - + std::vector<shark::RealVector> features; + Shark::ListSampleRangeToSharkVector(input, features,startIndex,size); + shark::Data<shark::RealVector> data = shark::createDataFromRange(features); + TargetSampleType target; + data = m_Encoder(data); + unsigned int id = startIndex; + target.SetSize(this->m_Dimension); + for(const auto& p : data.elements()) + { + for(unsigned int a = 0; a < this->m_Dimension; ++a) + { + target[a]=p[a]; + } + 
targets->SetMeasurementVector(id,target); + ++id; } - } - } // namespace otb #endif diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModelFactory.h b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModelFactory.h index 8e044713f3..86fb9a5676 100644 --- a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModelFactory.h +++ b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModelFactory.h @@ -20,13 +20,15 @@ #ifndef otbPCAModelFactory_h #define otbPCAModelFactory_h - #include "itkObjectFactoryBase.h" #include "itkImageIOBase.h" namespace otb { - + +/** \class PCAModelFactory + * Factory for the PCAModel + */ template <class TInputValue, class TTargetValue> class ITK_EXPORT PCAModelFactory : public itk::ObjectFactoryBase { @@ -61,18 +63,12 @@ protected: private: PCAModelFactory(const Self &); //purposely not implemented void operator =(const Self&); //purposely not implemented - }; - - } //namespace otb - #ifndef OTB_MANUAL_INSTANTIATION #include "otbPCAModelFactory.txx" #endif #endif - - diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModelFactory.txx b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModelFactory.txx index aeafff621d..ab31accbe0 100644 --- a/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModelFactory.txx +++ b/Modules/Learning/DimensionalityReductionLearning/include/otbPCAModelFactory.txx @@ -20,7 +20,6 @@ #ifndef otbPCAFactory_txx #define otbPCAFactory_txx - #include "otbPCAModelFactory.h" #include "itkCreateObjectFunction.h" @@ -29,18 +28,19 @@ namespace otb { + template <class TInputValue, class TOutputValue> PCAModelFactory<TInputValue,TOutputValue>::PCAModelFactory() { - std::string classOverride = std::string("DimensionalityReductionModel"); std::string subclass = std::string("PCAModel"); - this->RegisterOverride(classOverride.c_str(), - subclass.c_str(), - "Shark PCA ML Model", - 1, - itk::CreateObjectFunction<PCAModel<TInputValue>>::New()); + this->RegisterOverride( + classOverride.c_str(), + subclass.c_str(), + "Shark PCA ML Model", + 1, + itk::CreateObjectFunction<PCAModel<TInputValue>>::New()); } template <class TInputValue, class TOutputValue> diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModel.h b/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModel.h index 1025ab0e6c..f1a10a90c8 100644 --- a/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModel.h +++ b/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModel.h @@ -20,11 +20,8 @@ #ifndef otbSOMModel_h #define otbSOMModel_h -//#include "DimensionalityReductionModel.h" #include "otbSOMMap.h" -#include "otbSOM.h" - #include "itkEuclideanDistanceMetric.h" // the distance function #include "otbCzihoSOMLearningBehaviorFunctor.h" @@ -35,125 +32,126 @@ namespace otb { + +/** \class SOMModel + * MachineLearningModel for Self-Organizing Map + */ template <class TInputValue, unsigned int MapDimension> -class ITK_EXPORT SOMModel: public MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>> +class ITK_EXPORT SOMModel + : public MachineLearningModel< + itk::VariableLengthVector< TInputValue >, + itk::VariableLengthVector< TInputValue > > { - public: - - typedef SOMModel Self; - typedef MachineLearningModel<itk::VariableLengthVector< TInputValue> , itk::VariableLengthVector< TInputValue>> Superclass; - typedef itk::SmartPointer<Self> Pointer; - typedef itk::SmartPointer<const 
Self> ConstPointer; - - typedef typename Superclass::InputValueType InputValueType; - typedef typename Superclass::InputSampleType InputSampleType; - typedef typename Superclass::InputListSampleType InputListSampleType; - typedef typename InputListSampleType::Pointer ListSamplePointerType; - typedef typename Superclass::TargetValueType TargetValueType; - typedef typename Superclass::TargetSampleType TargetSampleType; - typedef typename Superclass::TargetListSampleType TargetListSampleType; - - /// Confidence map related typedefs - - typedef typename Superclass::ConfidenceValueType ConfidenceValueType; - typedef typename Superclass::ConfidenceSampleType ConfidenceSampleType; - typedef typename Superclass::ConfidenceListSampleType ConfidenceListSampleType; - - - - typedef SOMMap<itk::VariableLengthVector<TInputValue>,itk::Statistics::EuclideanDistanceMetric<itk::VariableLengthVector<TInputValue>>, MapDimension> MapType; - typedef typename MapType::SizeType SizeType; - typedef typename MapType::SpacingType SpacingType; - //typedef otb::SOM<InputListSampleType, MapType> EstimatorType; - typedef otb::SOM<InputListSampleType, MapType> EstimatorType; - - typedef Functor::CzihoSOMLearningBehaviorFunctor SOMLearningBehaviorFunctorType; - typedef Functor::CzihoSOMNeighborhoodBehaviorFunctor SOMNeighborhoodBehaviorFunctorType; - - itkNewMacro(Self); - itkTypeMacro(SOMModel, DimensionalityReductionModel); - - /** Accessors */ - itkSetMacro(NumberOfIterations, unsigned int); - itkGetMacro(NumberOfIterations, unsigned int); - itkSetMacro(BetaInit, double); - itkGetMacro(BetaInit, double); - itkSetMacro(WriteMap, bool); - itkGetMacro(WriteMap, bool); - itkSetMacro(BetaEnd, double); - itkGetMacro(BetaEnd, double); - itkSetMacro(MinWeight, InputValueType); - itkGetMacro(MinWeight, InputValueType); - itkSetMacro(MaxWeight, InputValueType); - itkGetMacro(MaxWeight, InputValueType); - itkSetMacro(MapSize, SizeType); - itkGetMacro(MapSize, SizeType); - itkSetMacro(NeighborhoodSizeInit, SizeType); - itkGetMacro(NeighborhoodSizeInit, SizeType); - itkSetMacro(RandomInit, bool); - itkGetMacro(RandomInit, bool); - itkSetMacro(Seed, unsigned int); - itkGetMacro(Seed, unsigned int); - itkGetObjectMacro(ListSample, InputListSampleType); - itkSetObjectMacro(ListSample, InputListSampleType); - - bool CanReadFile(const std::string & filename); - bool CanWriteFile(const std::string & filename); - - void Save(const std::string & filename, const std::string & name="") ; - void Load(const std::string & filename, const std::string & name="") ; - - void Train() ITK_OVERRIDE; - //void Dimensionality_reduction() {}; // Dimensionality reduction is done by DoPredict - - //unsigned int GetDimension() { return MapType::ImageDimension;}; + typedef SOMModel Self; + typedef MachineLearningModel< + itk::VariableLengthVector< TInputValue >, + itk::VariableLengthVector< TInputValue > > Superclass; + typedef itk::SmartPointer<Self> Pointer; + typedef itk::SmartPointer<const Self> ConstPointer; + + typedef typename Superclass::InputValueType InputValueType; + typedef typename Superclass::InputSampleType InputSampleType; + typedef typename Superclass::InputListSampleType InputListSampleType; + typedef typename InputListSampleType::Pointer ListSamplePointerType; + typedef typename Superclass::TargetValueType TargetValueType; + typedef typename Superclass::TargetSampleType TargetSampleType; + typedef typename Superclass::TargetListSampleType TargetListSampleType; + + // Confidence map related typedefs + typedef typename 
Superclass::ConfidenceValueType ConfidenceValueType; + typedef typename Superclass::ConfidenceSampleType ConfidenceSampleType; + typedef typename Superclass::ConfidenceListSampleType ConfidenceListSampleType; + + typedef SOMMap< + itk::VariableLengthVector<TInputValue>, + itk::Statistics::EuclideanDistanceMetric< + itk::VariableLengthVector<TInputValue> >, + MapDimension> MapType; + typedef typename MapType::SizeType SizeType; + typedef typename MapType::SpacingType SpacingType; + + typedef Functor::CzihoSOMLearningBehaviorFunctor SOMLearningBehaviorFunctorType; + typedef Functor::CzihoSOMNeighborhoodBehaviorFunctor SOMNeighborhoodBehaviorFunctorType; + + itkNewMacro(Self); + itkTypeMacro(SOMModel, DimensionalityReductionModel); + + /** Accessors */ + itkSetMacro(NumberOfIterations, unsigned int); + itkGetMacro(NumberOfIterations, unsigned int); + itkSetMacro(BetaInit, double); + itkGetMacro(BetaInit, double); + itkSetMacro(WriteMap, bool); + itkGetMacro(WriteMap, bool); + itkSetMacro(BetaEnd, double); + itkGetMacro(BetaEnd, double); + itkSetMacro(MinWeight, InputValueType); + itkGetMacro(MinWeight, InputValueType); + itkSetMacro(MaxWeight, InputValueType); + itkGetMacro(MaxWeight, InputValueType); + itkSetMacro(MapSize, SizeType); + itkGetMacro(MapSize, SizeType); + itkSetMacro(NeighborhoodSizeInit, SizeType); + itkGetMacro(NeighborhoodSizeInit, SizeType); + itkSetMacro(RandomInit, bool); + itkGetMacro(RandomInit, bool); + itkSetMacro(Seed, unsigned int); + itkGetMacro(Seed, unsigned int); + itkGetObjectMacro(ListSample, InputListSampleType); + itkSetObjectMacro(ListSample, InputListSampleType); + + bool CanReadFile(const std::string & filename); + bool CanWriteFile(const std::string & filename); + + void Save(const std::string & filename, const std::string & name="") ; + void Load(const std::string & filename, const std::string & name="") ; + + void Train() ITK_OVERRIDE; + protected: - SOMModel(); - ~SOMModel() ITK_OVERRIDE; - + SOMModel(); + ~SOMModel() ITK_OVERRIDE; + private: - typename MapType::Pointer m_SOMMap; - - virtual TargetSampleType DoPredict(const InputSampleType& input, ConfidenceValueType * quality = ITK_NULLPTR) const; - - /** Map Parameters used for training */ - - SizeType m_MapSize; - /** Number of iterations */ - unsigned int m_NumberOfIterations; - /** Initial learning coefficient */ - double m_BetaInit; - /** Final learning coefficient */ - double m_BetaEnd; - /** Initial neighborhood size */ - SizeType m_NeighborhoodSizeInit; - /** Minimum initial neuron weights */ - InputValueType m_MinWeight; - /** Maximum initial neuron weights */ - InputValueType m_MaxWeight; - /** Random initialization bool */ - bool m_RandomInit; - /** Seed for random initialization */ - unsigned int m_Seed; - /** The input list sample */ - ListSamplePointerType m_ListSample; - /** Behavior of the Learning weightening (link to the beta coefficient) */ - SOMLearningBehaviorFunctorType m_BetaFunctor; - /** Behavior of the Neighborhood extent */ - SOMNeighborhoodBehaviorFunctorType m_NeighborhoodSizeFunctor; - /** Write the SOM Map vectors in a txt file */ - bool m_WriteMap; + typename MapType::Pointer m_SOMMap; + + virtual TargetSampleType DoPredict( + const InputSampleType& input, + ConfidenceValueType * quality = ITK_NULLPTR) const; + + /** Map size (width, height) */ + SizeType m_MapSize; + /** Number of iterations */ + unsigned int m_NumberOfIterations; + /** Initial learning coefficient */ + double m_BetaInit; + /** Final learning coefficient */ + double m_BetaEnd; + /** Initial 
neighborhood size */ + SizeType m_NeighborhoodSizeInit; + /** Minimum initial neuron weights */ + InputValueType m_MinWeight; + /** Maximum initial neuron weights */ + InputValueType m_MaxWeight; + /** Random initialization bool */ + bool m_RandomInit; + /** Seed for random initialization */ + unsigned int m_Seed; + /** The input list sample */ + ListSamplePointerType m_ListSample; + /** Behavior of the Learning weightening (link to the beta coefficient) */ + SOMLearningBehaviorFunctorType m_BetaFunctor; + /** Behavior of the Neighborhood extent */ + SOMNeighborhoodBehaviorFunctorType m_NeighborhoodSizeFunctor; + /** Write the SOM Map vectors in a txt file */ + bool m_WriteMap; }; - } // end namespace otb - #ifndef OTB_MANUAL_INSTANTIATION #include "otbSOMModel.txx" #endif - #endif - diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModel.txx b/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModel.txx index 1fd4c6ea20..e0bfe52f92 100644 --- a/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModel.txx +++ b/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModel.txx @@ -21,208 +21,210 @@ #define otbSOMModel_txx #include "otbSOMModel.h" - +#include "otbSOM.h" #include "otbImageFileReader.h" #include "otbImageFileWriter.h" #include "itkMacro.h" - - -// test text file #include "itkImageRegionIterator.h" #include "itkImageRegionConstIterator.h" -#include <fstream> - - #include "itkImage.h" +#include <fstream> + namespace otb { - -template <class TInputValue, unsigned int MapDimension> -SOMModel<TInputValue, MapDimension>::SOMModel() +namespace internal +{ +template<typename T> +std::ostream& BinaryWrite(std::ostream& stream, const T& value) { - this->m_Dimension = MapType::ImageDimension; + return stream.write(reinterpret_cast<const char*>(&value), sizeof(T)); } - -template <class TInputValue, unsigned int MapDimension> -SOMModel<TInputValue, MapDimension>::~SOMModel() +std::ostream& BinaryWriteString(std::ofstream& stream, const std::string& value) { + return stream.write(value.c_str(), value.length()); } - -template <class TInputValue, unsigned int MapDimension> -void SOMModel<TInputValue, MapDimension>::Train() +template<typename T> +std::istream & BinaryRead(std::istream& stream, T& value) { - - typename EstimatorType::Pointer estimator = EstimatorType::New(); - - estimator->SetListSample(m_ListSample); - estimator->SetMapSize(m_MapSize); - estimator->SetNeighborhoodSizeInit(m_NeighborhoodSizeInit); - estimator->SetNumberOfIterations(m_NumberOfIterations); - estimator->SetBetaInit(m_BetaInit); - estimator->SetBetaEnd(m_BetaEnd); - estimator->SetMaxWeight(m_MaxWeight); - //AddProcess(estimator,"Learning"); - estimator->Update(); - m_SOMMap = estimator->GetOutput(); - } - - + return stream.read(reinterpret_cast<char*>(&value), sizeof(T)); +} +} // end of namespace internal template <class TInputValue, unsigned int MapDimension> -bool SOMModel<TInputValue, MapDimension>::CanReadFile(const std::string & filename) +SOMModel<TInputValue, MapDimension>::SOMModel() { - try - { - this->Load(filename); - } - catch(...) 
- { - return false; - } - return true; + this->m_Dimension = MapType::ImageDimension; } - template <class TInputValue, unsigned int MapDimension> -bool SOMModel<TInputValue, MapDimension>::CanWriteFile(const std::string & /*filename*/) +SOMModel<TInputValue, MapDimension>::~SOMModel() { - return true; } -template<typename T> -std::ostream& binary_write(std::ostream& stream, const T& value){ - return stream.write(reinterpret_cast<const char*>(&value), sizeof(T)); -} - - -std::ostream& binary_write_string(std::ofstream& stream, const std::string& value){ - return stream.write(value.c_str(), value.length()); +template <class TInputValue, unsigned int MapDimension> +void +SOMModel<TInputValue, MapDimension>::Train() +{ + typedef otb::SOM<InputListSampleType, MapType> EstimatorType; + typename EstimatorType::Pointer estimator = EstimatorType::New(); + estimator->SetListSample(m_ListSample); + estimator->SetMapSize(m_MapSize); + estimator->SetNeighborhoodSizeInit(m_NeighborhoodSizeInit); + estimator->SetNumberOfIterations(m_NumberOfIterations); + estimator->SetBetaInit(m_BetaInit); + estimator->SetBetaEnd(m_BetaEnd); + estimator->SetMaxWeight(m_MaxWeight); + estimator->Update(); + m_SOMMap = estimator->GetOutput(); } -template<typename T> -std::istream & binary_read(std::istream& stream, T& value){ - return stream.read(reinterpret_cast<char*>(&value), sizeof(T)); +template <class TInputValue, unsigned int MapDimension> +bool +SOMModel<TInputValue, MapDimension>::CanReadFile(const std::string & filename) +{ + try + { + this->Load(filename); + } + catch(...) + { + return false; + } + return true; } - - template <class TInputValue, unsigned int MapDimension> -void SOMModel<TInputValue, MapDimension>::Save(const std::string & filename, const std::string & /*name*/) +bool +SOMModel<TInputValue, MapDimension>::CanWriteFile(const std::string & /*filename*/) { - itk::ImageRegionConstIterator<MapType> inputIterator(m_SOMMap,m_SOMMap->GetLargestPossibleRegion()); - inputIterator.GoToBegin(); - std::ofstream ofs(filename, std::ios::binary); - binary_write_string(ofs,"som"); - binary_write(ofs,static_cast<unsigned int>(MapDimension)); - SizeType size = m_SOMMap->GetLargestPossibleRegion().GetSize() ; - for (size_t i=0;i<MapDimension;i++){ - binary_write(ofs,size[i]); - } - - binary_write(ofs,inputIterator.Get().GetNumberOfElements()); - while(!inputIterator.IsAtEnd()){ - InputSampleType vect = inputIterator.Get(); - for (size_t i=0;i<vect.GetNumberOfElements();i++){ - binary_write(ofs,vect[i]); - } - ++inputIterator; - } - ofs.close(); - - if (this->m_WriteMap == true) // output the map vectors in a txt file - { - std::ofstream otxt(filename+".txt"); - inputIterator.GoToBegin(); - while(!inputIterator.IsAtEnd()) - { - InputSampleType vect = inputIterator.Get(); - for (size_t i=0;i<vect.GetNumberOfElements();i++) - { - - otxt << vect[i] << " "; - } - otxt << std::endl; - ++inputIterator; - } - otxt.close(); - } + return true; } template <class TInputValue, unsigned int MapDimension> -void SOMModel<TInputValue, MapDimension>::Load(const std::string & filename, const std::string & /*name*/) +void +SOMModel<TInputValue, MapDimension>::Save(const std::string & filename, const std::string & /*name*/) { - - std::ifstream ifs(filename, std::ios::binary); - - /** Read the model key (should be som) */ - char s[]=" "; - for (int i=0; i<3; i++){ - binary_read(ifs,s[i]); - } - std::string modelType(s); - /** Read the dimension of the map (should be equal to MapDimension) */ - - unsigned int dimension; - 
binary_read(ifs,dimension); - if (modelType != "som" || dimension != MapDimension){ - itkExceptionMacro(<< "Error opening " << filename.c_str() ); + itk::ImageRegionConstIterator<MapType> inputIterator(m_SOMMap,m_SOMMap->GetLargestPossibleRegion()); + inputIterator.GoToBegin(); + std::ofstream ofs(filename, std::ios::binary); + internal::BinaryWriteString(ofs,"som"); + internal::BinaryWrite(ofs,static_cast<unsigned int>(MapDimension)); + SizeType size = m_SOMMap->GetLargestPossibleRegion().GetSize() ; + for (size_t i=0;i<MapDimension;i++) + { + internal::BinaryWrite(ofs,size[i]); } - SizeType size; - itk::Index< MapDimension > index; - for (unsigned int i=0 ; i<MapDimension; i++) - { - binary_read(ifs,size[i]); - index[i]=0; - } - unsigned int numberOfElements; - binary_read(ifs,numberOfElements); - m_SOMMap = MapType::New(); - typename MapType::RegionType region; - region.SetSize( size ); - m_SOMMap->SetNumberOfComponentsPerPixel(numberOfElements); - region.SetIndex( index ); - m_SOMMap->SetRegions( region ); - m_SOMMap->Allocate(); - - itk::ImageRegionIterator<MapType> outputIterator(m_SOMMap,region); - outputIterator.GoToBegin(); - std::string value; - while(!outputIterator.IsAtEnd()){ - InputSampleType vect(numberOfElements); - for (unsigned int i=0 ; i<numberOfElements; i++) - { - float v; // InputValue type is not the same during training anddimredvector. - binary_read(ifs,v); - vect[i] = static_cast<double>(v); - } - outputIterator.Set(vect); - ++outputIterator; - } - ifs.close(); - - this->m_Dimension = MapType::ImageDimension; + internal::BinaryWrite(ofs,inputIterator.Get().GetNumberOfElements()); + while(!inputIterator.IsAtEnd()) + { + InputSampleType vect = inputIterator.Get(); + for (size_t i=0;i<vect.GetNumberOfElements();i++) + { + internal::BinaryWrite(ofs,vect[i]); + } + ++inputIterator; + } + ofs.close(); + + // output the map vectors in a txt file + if (this->m_WriteMap == true) + { + std::ofstream otxt(filename+".txt"); + inputIterator.GoToBegin(); + while(!inputIterator.IsAtEnd()) + { + InputSampleType vect = inputIterator.Get(); + for (size_t i=0;i<vect.GetNumberOfElements();i++) + { + otxt << vect[i] << " "; + } + otxt << std::endl; + ++inputIterator; + } + otxt.close(); + } } +template <class TInputValue, unsigned int MapDimension> +void +SOMModel<TInputValue, MapDimension>::Load(const std::string & filename, const std::string & /*name*/) +{ + std::ifstream ifs(filename, std::ios::binary); + + /** Read the model key (should be som) */ + char s[]=" "; + for (int i=0; i<3; i++) + { + internal::BinaryRead(ifs,s[i]); + } + std::string modelType(s); + /** Read the dimension of the map (should be equal to MapDimension) */ + + unsigned int dimension; + internal::BinaryRead(ifs,dimension); + if (modelType != "som" || dimension != MapDimension) + { + itkExceptionMacro(<< "Error opening " << filename.c_str() ); + } + + SizeType size; + itk::Index< MapDimension > index; + for (unsigned int i=0 ; i<MapDimension; i++) + { + internal::BinaryRead(ifs,size[i]); + index[i]=0; + } + unsigned int numberOfElements; + internal::BinaryRead(ifs,numberOfElements); + m_SOMMap = MapType::New(); + typename MapType::RegionType region; + region.SetSize( size ); + m_SOMMap->SetNumberOfComponentsPerPixel(numberOfElements); + region.SetIndex( index ); + m_SOMMap->SetRegions( region ); + m_SOMMap->Allocate(); + + itk::ImageRegionIterator<MapType> outputIterator(m_SOMMap,region); + outputIterator.GoToBegin(); + std::string value; + while(!outputIterator.IsAtEnd()) + { + InputSampleType 
vect(numberOfElements);
+    for (unsigned int i=0 ; i<numberOfElements; i++)
+      {
+      // InputValue type is not the same during training and dimensionality reduction
+      float v;
+      internal::BinaryRead(ifs,v);
+      vect[i] = static_cast<double>(v);
+      }
+    outputIterator.Set(vect);
+    ++outputIterator;
+    }
+  ifs.close();
+  this->m_Dimension = MapType::ImageDimension;
+}
 
 template <class TInputValue, unsigned int MapDimension>
 typename SOMModel<TInputValue, MapDimension>::TargetSampleType
-SOMModel<TInputValue, MapDimension>::DoPredict(const InputSampleType & value, ConfidenceValueType * /*quality*/) const
-{
-  TargetSampleType target;
-  target.SetSize(this->m_Dimension);
-
-  auto winner =m_SOMMap->GetWinner(value);
-  for (unsigned int i=0; i< this->m_Dimension ;i++) {
-    target[i] = winner.GetElement(i);
-  }
-
-  return target;
+SOMModel<TInputValue, MapDimension>::DoPredict(
+  const InputSampleType & value,
+  ConfidenceValueType * /*quality*/) const
+{
+  TargetSampleType target;
+  target.SetSize(this->m_Dimension);
+
+  auto winner = m_SOMMap->GetWinner(value);
+  for (unsigned int i=0; i < this->m_Dimension; i++)
+    {
+    target[i] = winner.GetElement(i);
+    }
+  return target;
 }
 
 } // namespace otb
diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModelFactory.h b/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModelFactory.h
index 4085d65199..72d590a3f8 100644
--- a/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModelFactory.h
+++ b/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModelFactory.h
@@ -20,13 +20,15 @@
 #ifndef otbSOMModelFactory_h
 #define otbSOMModelFactory_h
 
-
 #include "itkObjectFactoryBase.h"
 #include "itkImageIOBase.h"
 
 namespace otb
 {
-
+
+/** \class SOMModelFactory
+ * Factory for SOMModel
+ */
 template <class TInputValue, class TTargetValue, unsigned int MapDimension>
 class ITK_EXPORT SOMModelFactory : public itk::ObjectFactoryBase
 {
@@ -64,15 +66,10 @@ private:
 
 };
 
-
-
 } //namespace otb
 
-
 #ifndef OTB_MANUAL_INSTANTIATION
 #include "otbSOMModelFactory.txx"
 #endif
 
 #endif
-
-
diff --git a/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModelFactory.txx b/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModelFactory.txx
index bcef5c92a4..5799660768 100644
--- a/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModelFactory.txx
+++ b/Modules/Learning/DimensionalityReductionLearning/include/otbSOMModelFactory.txx
@@ -20,7 +20,6 @@
 #ifndef otbSOMFactory_txx
 #define otbSOMFactory_txx
 
-
 #include "otbSOMModelFactory.h"
 
 #include "itkCreateObjectFunction.h"
@@ -32,16 +31,15 @@ namespace otb
 template <class TInputValue, class TOutputValue, unsigned int MapDimension>
 SOMModelFactory<TInputValue,TOutputValue,MapDimension>::SOMModelFactory()
 {
-
   std::string classOverride = std::string("DimensionalityReductionModel");
   std::string subclass = std::string("SOMModel");
 
-  this->RegisterOverride(classOverride.c_str(),
-                         subclass.c_str(),
-                         "SOM DR Model",
-                         1,
-
-                         itk::CreateObjectFunction<SOMModel<TInputValue, MapDimension>>::New());
+  this->RegisterOverride(
+    classOverride.c_str(),
+    subclass.c_str(),
+    "SOM DR Model",
+    1,
+    itk::CreateObjectFunction<SOMModel<TInputValue, MapDimension>>::New());
 }
 
 template <class TInputValue, class TOutputValue, unsigned int MapDimension>
-- 
GitLab
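
Usage sketch (illustration only, not part of the patch): the snippet below shows one plausible way to drive the refactored PCAModel once this change is applied. Only Train(), Save(), SetWriteEigenvectors() and the renamed members are taken from the code above; the SetInputListSample() and SetDimension() accessors are assumed to come from the MachineLearningModel base class (GetInputListSample() and m_Dimension are used in the .txx files) and should be checked against the OTB version in use.

// Minimal sketch: train a PCA model on a small sample list and serialize it.
// Calls marked "assumed" are not declared in this patch and may differ.
#include "otbPCAModel.h"
#include "itkListSample.h"
#include "itkVariableLengthVector.h"

int main()
{
  typedef itk::VariableLengthVector<float>        SampleType;
  typedef itk::Statistics::ListSample<SampleType> ListSampleType;

  // Build a tiny 3-component sample list with placeholder values.
  ListSampleType::Pointer samples = ListSampleType::New();
  samples->SetMeasurementVectorSize(3);
  for (unsigned int i = 0; i < 100; ++i)
    {
    SampleType s(3);
    s[0] = static_cast<float>(i);
    s[1] = 0.5f * i;
    s[2] = 2.0f * i;
    samples->PushBack(s);
    }

  typedef otb::PCAModel<float> PCAModelType;
  PCAModelType::Pointer model = PCAModelType::New();
  model->SetInputListSample(samples); // assumed base-class setter (GetInputListSample() is used in Train())
  model->SetDimension(2);             // assumed accessor for the protected m_Dimension member
  model->SetWriteEigenvectors(true);  // declared in otbPCAModel.h above
  model->Train();                     // runs shark::PCA and builds the encoder/decoder
  model->Save("pca_model.txt");       // writes the "pca" header and the serialized encoder
  return 0;
}

An equivalent sketch for SOMModel would additionally set the map size, the number of iterations and the beta range through the accessors declared in otbSOMModel.h.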