Commit ef7227bd authored by Cédric Traizet's avatar Cédric Traizet
Browse files

the learning curve file can now be set in the app

No related merge requests found
Showing with 44 additions and 5 deletions
+44 -5
...@@ -59,6 +59,12 @@ public: ...@@ -59,6 +59,12 @@ public:
itkGetMacro(Beta,itk::Array<double>); itkGetMacro(Beta,itk::Array<double>);
itkSetMacro(Beta,itk::Array<double>); itkSetMacro(Beta,itk::Array<double>);
itkGetMacro(WriteLearningCurve,bool);
itkSetMacro(WriteLearningCurve,bool);
itkGetMacro(LearningCurveFileName,std::string);
itkSetMacro(LearningCurveFileName,std::string);
bool CanReadFile(const std::string & filename); bool CanReadFile(const std::string & filename);
bool CanWriteFile(const std::string & filename); bool CanWriteFile(const std::string & filename);
...@@ -98,6 +104,7 @@ private: ...@@ -98,6 +104,7 @@ private:
itk::Array<double> m_Beta; // Sparsity regularization parameter itk::Array<double> m_Beta; // Sparsity regularization parameter
bool m_WriteLearningCurve; // Flag for writing the learning curve into a txt file bool m_WriteLearningCurve; // Flag for writing the learning curve into a txt file
std::string m_LearningCurveFileName; // Name of the output learning curve printed after training
}; };
} // end namespace otb } // end namespace otb
......
...@@ -25,6 +25,7 @@ template <class TInputValue, class AutoencoderType> ...@@ -25,6 +25,7 @@ template <class TInputValue, class AutoencoderType>
AutoencoderModel<TInputValue,AutoencoderType>::AutoencoderModel() AutoencoderModel<TInputValue,AutoencoderType>::AutoencoderModel()
{ {
this->m_IsDoPredictBatchMultiThreaded = true; this->m_IsDoPredictBatchMultiThreaded = true;
this->m_WriteLearningCurve = false;
} }
...@@ -36,6 +37,7 @@ AutoencoderModel<TInputValue,AutoencoderType>::~AutoencoderModel() ...@@ -36,6 +37,7 @@ AutoencoderModel<TInputValue,AutoencoderType>::~AutoencoderModel()
template <class TInputValue, class AutoencoderType> template <class TInputValue, class AutoencoderType>
void AutoencoderModel<TInputValue,AutoencoderType>::Train() void AutoencoderModel<TInputValue,AutoencoderType>::Train()
{ {
std::vector<shark::RealVector> features; std::vector<shark::RealVector> features;
std::cout << "converting the input ListSample to Shark vector" << std::endl; std::cout << "converting the input ListSample to Shark vector" << std::endl;
Shark::ListSampleToSharkVector(this->GetInputListSample(), features); Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
...@@ -43,9 +45,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train() ...@@ -43,9 +45,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::Train()
shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features ); shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange( features );
std::ofstream ofs;
if (this->m_WriteLearningCurve)
{
ofs.open(m_LearningCurveFileName);
ofs << "learning curve" << std::endl;
}
/*
std::ofstream ofs("/mnt/data/home/traizetc/computation/learning_curve.txt"); //learning curve std::ofstream ofs("/mnt/data/home/traizetc/computation/learning_curve.txt"); //learning curve
ofs << "learning curve" << std::endl; ofs << "learning curve" << std::endl;
*/
if (m_Epsilon > 0){ if (m_Epsilon > 0){
shark::TrainingProgress<> criterion(5,m_Epsilon); shark::TrainingProgress<> criterion(5,m_Epsilon);
...@@ -106,15 +115,19 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(shark::Abstrac ...@@ -106,15 +115,19 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(shark::Abstrac
optimizer.init(error); optimizer.init(error);
std::cout<<"error before training : " << optimizer.solution().value<<std::endl; std::cout<<"error before training : " << optimizer.solution().value<<std::endl;
if (this->m_WriteLearningCurve)
{
File << "end layer" << std::endl;
}
unsigned int i=0; unsigned int i=0;
do{ do{
i++; i++;
optimizer.step(error); optimizer.step(error);
if (this->m_WriteLearningCurve)
{
File << optimizer.solution().value << std::endl; File << optimizer.solution().value << std::endl;
}
} while( !criterion.stop( optimizer.solution() ) ); } while( !criterion.stop( optimizer.solution() ) );
std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl; std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
...@@ -149,10 +162,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneSparseLayer(shark::A ...@@ -149,10 +162,16 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneSparseLayer(shark::A
do{ do{
i++; i++;
optimizer.step(error); optimizer.step(error);
if (this->m_WriteLearningCurve)
{
File << optimizer.solution().value << std::endl; File << optimizer.solution().value << std::endl;
}
} while( !criterion.stop( optimizer.solution() ) ); } while( !criterion.stop( optimizer.solution() ) );
std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl; std::cout<<"error after " << i << "iterations : " << optimizer.solution().value<<std::endl;
if (this->m_WriteLearningCurve)
{
File << "end layer" << std::endl;
}
net.setParameterVector(optimizer.solution().point); net.setParameterVector(optimizer.solution().point);
m_net.push_back(net); m_net.push_back(net);
samples = net.encode(samples); samples = net.encode(samples);
......
...@@ -80,6 +80,11 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue> ...@@ -80,6 +80,11 @@ cbLearningApplicationBaseDR<TInputValue,TOutputValue>
AddParameter(ParameterType_StringList, "model.autoencoder.beta", "Sparsity regularization strength"); AddParameter(ParameterType_StringList, "model.autoencoder.beta", "Sparsity regularization strength");
SetParameterDescription("model.autoencoder.beta", SetParameterDescription("model.autoencoder.beta",
"Sparsity regularization strength"); "Sparsity regularization strength");
AddParameter(ParameterType_OutputFilename, "model.autoencoder.learningcurve", "Learning curve");
SetParameterDescription("model.autoencoder.learningcurve", "Learning error values");
MandatoryOff("model.autoencoder.learningcurve");
} }
...@@ -146,6 +151,14 @@ void cbLearningApplicationBaseDR<TInputValue,TOutputValue> ...@@ -146,6 +151,14 @@ void cbLearningApplicationBaseDR<TInputValue,TOutputValue>
dimredTrainer->SetNoise(noise); dimredTrainer->SetNoise(noise);
dimredTrainer->SetRho(rho); dimredTrainer->SetRho(rho);
dimredTrainer->SetBeta(beta); dimredTrainer->SetBeta(beta);
if (HasValue("model.autoencoder.learningcurve") && IsParameterEnabled("model.autoencoder.learningcurve"))
{
dimredTrainer->SetWriteLearningCurve(true);
dimredTrainer->SetLearningCurveFileName(GetParameterString("model.autoencoder.learningcurve"));
}
dimredTrainer->SetInputListSample(trainingListSample); dimredTrainer->SetInputListSample(trainingListSample);
std::cout << "before train" << std::endl; std::cout << "before train" << std::endl;
dimredTrainer->Train(); dimredTrainer->Train();
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment