diff --git a/include/AutoencoderModel.h b/include/AutoencoderModel.h index edbe4ae0524e1e18befdd512b7639f53cff5041b..739f13f6cb7b910a84fb4585344e28159368a945 100644 --- a/include/AutoencoderModel.h +++ b/include/AutoencoderModel.h @@ -59,6 +59,12 @@ public: itkGetMacro(Beta,itk::Array<double>); itkSetMacro(Beta,itk::Array<double>); + itkGetMacro(WriteLearningCurve,bool); + itkSetMacro(WriteLearningCurve,bool); + + itkGetMacro(LearningCurveFileName,std::string); + itkSetMacro(LearningCurveFileName,std::string); + bool CanReadFile(const std::string & filename); bool CanWriteFile(const std::string & filename); @@ -98,6 +104,7 @@ private: itk::Array<double> m_Beta; // Sparsity regularization parameter bool m_WriteLearningCurve; // Flag for writting the learning curve into a txt file + std::string m_LearningCurveFileName; // Name of the output learning curve printed after training }; } // end namespace otb diff --git a/include/AutoencoderModel.txx b/include/AutoencoderModel.txx index ecd39a468f9de9eee919750ed0974b8d4c03ef52..8788315b72eebf862612f04c5484f4aeb97d9f3f 100644 --- a/include/AutoencoderModel.txx +++ b/include/AutoencoderModel.txx @@ -25,6 +25,7 @@ template <class TInputValue, class AutoencoderType> AutoencoderModel<TInputValue,AutoencoderType>::AutoencoderModel() { this->m_IsDoPredictBatchMultiThreaded = true; + this->m_WriteLearningCurve = false; } @@ -36,6 +37,7 @@ AutoencoderModel<TInputValue,AutoencoderType>::~AutoencoderModel() template <class TInputValue, class AutoencoderType> void AutoencoderModel<TInputValue,AutoencoderType>::Train() { + std::cout << this->m_WriteLearningCurve << std::endl; std::vector<shark::RealVector> features; std::cout << "converting the input ListSample to Shark vector" << std::endl; Shark::ListSampleToSharkVector(this->GetInputListSample(), features); @@ -43,9 +45,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train() shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features ); + 
std::ofstream ofs; + if (this->m_WriteLearningCurve == true) + { + ofs.open(m_LearningCurveFileName); + ofs << "learning curve" << std::endl; + } + /* std::ofstream ofs("/mnt/data/home/traizetc/computation/learning_curve.txt"); //learning curve ofs << "learning curve" << std::endl; - + */ if (m_Epsilon > 0){ shark::TrainingProgress<> criterion(5,m_Epsilon); @@ -106,15 +115,19 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(shark::Abstrac optimizer.init(error); std::cout<<"error before training : " << optimizer.solution().value<<std::endl; - - File << "end layer" << std::endl; - + if (this->m_WriteLearningCurve == true) + { + File << "end layer" << std::endl; + } unsigned int i=0; do{ i++; optimizer.step(error); + if (this->m_WriteLearningCurve == true) + { File << optimizer.solution().value << std::endl; + } } while( !criterion.stop( optimizer.solution() ) ); std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl; @@ -149,10 +162,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneSparseLayer(shark::A do{ i++; optimizer.step(error); + if (this->m_WriteLearningCurve == true) + { File << optimizer.solution().value << std::endl; + } } while( !criterion.stop( optimizer.solution() ) ); std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl; - + if (this->m_WriteLearningCurve == true) + { + File << "end layer" << std::endl; + } net.setParameterVector(optimizer.solution().point); m_net.push_back(net); samples = net.encode(samples); diff --git a/include/cbTrainAutoencoder.txx b/include/cbTrainAutoencoder.txx index 4efad5dae532f647cfc50961b37b56dc4cba0ced..244441d91868746af25458734b1ee313eb604da1 100644 --- a/include/cbTrainAutoencoder.txx +++ b/include/cbTrainAutoencoder.txx @@ -80,6 +80,11 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue> AddParameter(ParameterType_StringList, "model.autoencoder.beta", "Sparsity regularization strength"); 
SetParameterDescription("model.autoencoder.beta", "Sparsity regularization strength"); + + AddParameter(ParameterType_OutputFilename, "model.autoencoder.learningcurve", "Learning curve"); + SetParameterDescription("model.autoencoder.learningcurve", "Learning error values"); + MandatoryOff("model.autoencoder.learningcurve"); + } @@ -146,6 +151,13 @@ void cbLearningApplicationBaseDR<TInputValue,TOutputValue> dimredTrainer->SetNoise(noise); dimredTrainer->SetRho(rho); dimredTrainer->SetBeta(beta); + + if (HasValue("model.autoencoder.learningcurve") && IsParameterEnabled("model.autoencoder.learningcurve")) + { + dimredTrainer->SetWriteLearningCurve(true); + dimredTrainer->SetLearningCurveFileName(GetParameterString("model.autoencoder.learningcurve")); + } + dimredTrainer->SetInputListSample(trainingListSample); std::cout << "before train" << std::endl; dimredTrainer->Train();