Commit 00fb23c7 authored by Cédric Traizet's avatar Cédric Traizet

vector app work in progress

Showing with 72 additions and 46 deletions
@@ -35,10 +35,11 @@ namespace Wrapper
{
/** Utility function to negate std::isalnum */
-/*bool IsNotAlphaNum(char c)
+bool IsNotAlphaNum(char c)
{
return !std::isalnum(c);
-}*/
+}
class CbDimensionalityReductionVector : public Application
{
public:
@@ -97,8 +98,8 @@ class CbDimensionalityReductionVector : public Application
AddParameter(ParameterType_ListView, "feat", "Field names to be calculated."); //
SetParameterDescription("feat","List of field names in the input vector data used as features for training."); //
AddParameter(ParameterType_ListView, "feat_out", "Field names to be calculated."); //
SetParameterDescription("feat_out","List of field names in the input vector data used as features for training."); //
AddParameter(ParameterType_StringList, "featout", "Field names to be calculated."); //
SetParameterDescription("featout","List of field names in the input vector data used as features for training."); //
AddParameter(ParameterType_OutputFilename, "out", "Output vector data file containing the reduced vector");
SetParameterDescription("out","Output vector data file storing sample values (OGR format)."
@@ -111,40 +112,46 @@ class CbDimensionalityReductionVector : public Application
SetDocExampleParameterValue("model", "model.txt");
SetDocExampleParameterValue("out", "vectorDataOut.shp");
SetDocExampleParameterValue("feat", "perimeter area width");
SetDocExampleParameterValue("feat_out", "perimeter area width");
SetDocExampleParameterValue("featout", "perimeter area width");
//SetOfficialDocLink();
}
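For reference, the example values above amount to an invocation along the lines of: otbcli_CbDimensionalityReductionVector -in vectorData.shp -model model.txt -out vectorDataOut.shp -feat perimeter area width -featout perimeter area width. The otbcli_ launcher name is inferred from the class name and is an assumption; the registered application name may differ.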
void DoUpdateParameters() ITK_OVERRIDE
{
-/** I don't know what this does */
-/*
-if ( HasValue("in") )
-{
-std::string shapefile = GetParameterString("in");
-otb::ogr::DataSource::Pointer ogrDS;
-OGRSpatialReference oSRS("");
-std::vector<std::string> options;
-ogrDS = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read);
-otb::ogr::Layer layer = ogrDS->GetLayer(0);
-OGRFeatureDefn &layerDefn = layer.GetLayerDefn();
-ClearChoices("feat");
-for(int iField=0; iField< layerDefn.GetFieldCount(); iField++)
-{
-std::string item = layerDefn.GetFieldDefn(iField)->GetNameRef();
-std::string key(item);
-key.erase( std::remove_if(key.begin(),key.end(),IsNotAlphaNum), key.end());
-std::transform(key.begin(), key.end(), key.begin(), tolower);
-OGRFieldType fieldType = layerDefn.GetFieldDefn(iField)->GetType();
-if(fieldType == OFTInteger || ogr::version_proxy::IsOFTInteger64(fieldType) || fieldType == OFTReal)
-{
-std::string tmpKey="feat."+key;
-AddChoice(tmpKey,item);
-}
-}
-}*/
if ( HasValue("in") )
{
std::string shapefile = GetParameterString("in");
otb::ogr::DataSource::Pointer ogrDS;
OGRSpatialReference oSRS("");
std::vector<std::string> options;
ogrDS = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Read);
otb::ogr::Layer layer = ogrDS->GetLayer(0);
OGRFeatureDefn &layerDefn = layer.GetLayerDefn();
ClearChoices("feat");
//ClearChoices("featout");
for(int iField=0; iField< layerDefn.GetFieldCount(); iField++)
{
std::string item = layerDefn.GetFieldDefn(iField)->GetNameRef();
std::string key(item);
key.erase( std::remove_if(key.begin(),key.end(),IsNotAlphaNum), key.end());
std::transform(key.begin(), key.end(), key.begin(), tolower);
OGRFieldType fieldType = layerDefn.GetFieldDefn(iField)->GetType();
if(fieldType == OFTInteger || ogr::version_proxy::IsOFTInteger64(fieldType) || fieldType == OFTReal)
{
std::string tmpKey="feat."+key;
AddChoice(tmpKey,item);
}
/*
if(fieldType == OFTInteger || ogr::version_proxy::IsOFTInteger64(fieldType) || fieldType == OFTReal)
{
std::string tmpKey="featout."+key;
AddChoice(tmpKey,item);
}*/
}
}
}
void DoExecute() ITK_OVERRIDE
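As an editorial aside, the activated block above rebuilds the "feat" choice list from the numeric fields of the input layer. A minimal standalone sketch of the same technique with plain GDAL/OGR (GDAL 2.x API assumed; input.shp is a placeholder path, not taken from the commit):

// Sketch only: list the numeric fields of a vector file, mirroring DoUpdateParameters.
#include <iostream>
#include "ogrsf_frmts.h"

int main()
{
  GDALAllRegister();
  GDALDataset *ds = static_cast<GDALDataset*>(
    GDALOpenEx("input.shp", GDAL_OF_VECTOR | GDAL_OF_READONLY, nullptr, nullptr, nullptr));
  if (ds == nullptr)
    return 1;
  OGRLayer *layer = ds->GetLayer(0);
  OGRFeatureDefn *defn = layer->GetLayerDefn();
  for (int i = 0; i < defn->GetFieldCount(); ++i)
  {
    OGRFieldType type = defn->GetFieldDefn(i)->GetType();
    // Keep only numeric fields, as the application does for the "feat" choices.
    if (type == OFTInteger || type == OFTInteger64 || type == OFTReal)
      std::cout << defn->GetFieldDefn(i)->GetNameRef() << std::endl;
  }
  GDALClose(ds);
  return 0;
}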
@@ -222,25 +229,30 @@ class CbDimensionalityReductionVector : public Application
/** Create/Update Output Shape file */
+std::cout << GetParameterStringList("featout").size() << std::endl;
ogr::DataSource::Pointer output;
ogr::DataSource::Pointer buffer = ogr::DataSource::New();
bool updateMode = false;
if (IsParameterEnabled("out") && HasValue("out"))
{
// Create new OGRDataSource
output = ogr::DataSource::New(GetParameterString("out"), ogr::DataSource::Modes::Overwrite);
-otb::ogr::Layer newLayer = output->CreateLayer(
-GetParameterString("out"),
-const_cast<OGRSpatialReference*>(layer.GetSpatialRef()),
-layer.GetGeomType());
+otb::ogr::Layer newLayer = output->CreateLayer(GetParameterString("out"),
+const_cast<OGRSpatialReference*>(layer.GetSpatialRef()),
+layer.GetGeomType());
// Copy existing fields
OGRFeatureDefn &inLayerDefn = layer.GetLayerDefn();
-for (int k=0 ; k<inLayerDefn.GetFieldCount() ; k++)
+for (int k=0 ; k<inLayerDefn.GetFieldCount()-nbFeatures ; k++) // we don't copy the original bands
{
OGRFieldDefn fieldDefn(inLayerDefn.GetFieldDefn(k));
newLayer.CreateField(fieldDefn);
}
}
+/*
else
{
// Update mode
@@ -253,11 +265,13 @@ class CbDimensionalityReductionVector : public Application
source->Clear();
// Re-open input data source in update mode
output = otb::ogr::DataSource::New(shapefile, otb::ogr::DataSource::Modes::Update_LayerUpdate);
-}
+}*/
otb::ogr::Layer outLayer = output->GetLayer(0);
OGRErr errStart = outLayer.ogr().StartTransaction();
+/*
if (errStart != OGRERR_NONE)
{
itkExceptionMacro(<< "Unable to start transaction for OGR layer " << outLayer.ogr().GetName() << ".");
@@ -266,16 +280,16 @@ class CbDimensionalityReductionVector : public Application
// Add the field of prediction in the output layer if field not exist
OGRFeatureDefn &layerDefn = layer.GetLayerDefn();
-int idx = layerDefn.GetFieldIndex(GetParameterString("feat_out").c_str());
+int idx = layerDefn.GetFieldIndex(GetParameterStringList("featout").c_str());
if (idx >= 0)
{
if (layerDefn.GetFieldDefn(idx)->GetType() != OFTInteger)
itkExceptionMacro("Field name "<< GetParameterString("feat_out") << " already exists with a different type!");
itkExceptionMacro("Field name "<< GetParameterStringList("featout") << " already exists with a different type!");
}
else
{
-OGRFieldDefn predictedField(GetParameterString("feat_out").c_str(), OFTInteger);
+OGRFieldDefn predictedField(GetParameterStringList("featout").c_str(), OFTInteger);
ogr::FieldDefn predictedFieldDef(predictedField);
outLayer.CreateField(predictedFieldDef);
}
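One point worth flagging in the hunk above: featout is now a StringList parameter, so GetParameterStringList("featout") returns a std::vector<std::string>, and calling .c_str() on that vector will not compile. The surrounding block is commented out at the moment, so the build is unaffected, but once re-enabled the lookup will need one field name at a time. A hedged sketch, reusing the surrounding layerDefn and outLayer variables and keeping OFTInteger as in the commit (the already-exists type check is omitted for brevity):

// Sketch only: treat "featout" as the string list it is declared to be.
std::vector<std::string> outFieldNames = GetParameterStringList("featout");
for (const std::string & fieldName : outFieldNames)
{
  if (layerDefn.GetFieldIndex(fieldName.c_str()) < 0)
  {
    OGRFieldDefn predictedField(fieldName.c_str(), OFTInteger);
    ogr::FieldDefn predictedFieldDef(predictedField);
    outLayer.CreateField(predictedFieldDef);
  }
}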
@@ -283,7 +297,7 @@ class CbDimensionalityReductionVector : public Application
// Fill output layer
unsigned int count=0;
-std::string classfieldname = GetParameterString("feat_out");
+std::string classfieldname = GetParameterStringList("featout");
it = layer.cbegin();
itEnd = layer.cend();
for( ; it!=itEnd ; ++it, ++count)
@@ -312,7 +326,7 @@ class CbDimensionalityReductionVector : public Application
}
output->SyncToDisk();
clock_t toc = clock();
-otbAppLogINFO( "Elapsed: "<< ((double)(toc - tic) / CLOCKS_PER_SEC)<<" seconds.");
+otbAppLogINFO( "Elapsed: "<< ((double)(toc - tic) / CLOCKS_PER_SEC)<<" seconds.");*/
}
ModelPointerType m_Model;
@@ -40,6 +40,12 @@ public:
itkGetMacro(Noise,double);
itkSetMacro(Noise,double);
+itkGetMacro(rho,double);
+itkSetMacro(rho,double);
+itkGetMacro(beta,double);
+itkSetMacro(beta,double);
bool CanReadFile(const std::string & filename);
bool CanWriteFile(const std::string & filename);
@@ -67,6 +73,8 @@ private:
unsigned int m_NumberOfIterations;
double m_Regularization; // L2 Regularization parameter
double m_Noise; // probability for an input to be set to 0 (denosing autoencoder)
+double m_rho; // Sparsity parameter
+double m_beta; // Sparsity regularization parameter
};
} // end namespace otb
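For context on the two new members: in the standard sparse autoencoder formulation, rho is the target mean activation of each hidden neuron and beta weighs a Kullback-Leibler penalty added to the reconstruction error. Whether Shark's SparseAutoencoderError uses exactly this form should be checked against its documentation, but the usual penalty is

E_{\mathrm{sparse}} = E_{\mathrm{rec}} + \beta \sum_{j=1}^{h} \mathrm{KL}(\rho \,\|\, \hat{\rho}_j),
\qquad
\mathrm{KL}(\rho \,\|\, \hat{\rho}_j) = \rho \log\frac{\rho}{\hat{\rho}_j} + (1-\rho) \log\frac{1-\rho}{1-\hat{\rho}_j},

where \hat{\rho}_j is the mean activation of hidden neuron j over the training samples and h is the number of hidden neurons.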
@@ -9,6 +9,8 @@
//include train function
#include <shark/ObjectiveFunctions/ErrorFunction.h>
+#include <shark/ObjectiveFunctions/SparseAutoencoderError.h>//the error function performing the regularisation of the hidden neurons
#include <shark/Algorithms/GradientDescent/Rprop.h>// the RProp optimization algorithm
#include <shark/ObjectiveFunctions/Loss/SquaredLoss.h> // squared loss used for regression
#include <shark/ObjectiveFunctions/Regularizer.h> //L2 regulariziation
@@ -62,6 +64,7 @@ void AutoencoderModel<TInputValue,AutoencoderType>::TrainOneLayer(unsigned int n
shark::LabeledData<shark::RealVector,shark::RealVector> trainSet(samples,samples);//labels identical to inputs
shark::SquaredLoss<shark::RealVector> loss;
shark::ErrorFunction error(trainSet, &model, &loss);
+//shark::SparseAutoencoderError error(data,&model, &loss, m_rho, m_beta);
shark::TwoNormRegularizer regularizer(error.numberOfVariables());
error.setRegularizer(m_Regularization,&regularizer);
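A hedged sketch of how the commented-out sparse error above could be wired into the training loop, assuming the Shark 3.x API and reusing the surrounding TrainOneLayer variables (model, loss, m_rho, m_beta, m_NumberOfIterations; data stands for the unlabeled sample container named in the commented-out line). This is not the committed implementation:

// Sketch only: use the sparse error instead of the plain ErrorFunction.
shark::SparseAutoencoderError sparseError(data, &model, &loss, m_rho, m_beta);
shark::IRpropPlusFull optimizer;   // from the Rprop.h include above
optimizer.init(sparseError);       // some Shark versions require sparseError.init() first
for (unsigned int i = 0; i < m_NumberOfIterations; ++i)
{
  optimizer.step(sparseError);
}
model.setParameterVector(optimizer.solution().point);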
@@ -54,6 +54,7 @@ using SOM4DModelFactory = SOMModelFactory<TInputValue, TTargetValue, 4> ;
template <class TInputValue, class TTargetValue>
using SOM5DModelFactory = SOMModelFactory<TInputValue, TTargetValue, 5> ;
template <class TInputValue, class TOutputValue>
typename DimensionalityReductionModel<TInputValue,TOutputValue>::Pointer
DimensionalityReductionModelFactory<TInputValue,TOutputValue>
@@ -111,7 +111,7 @@ void cbLearningApplicationBaseDR<TInputValue,TOutputValue>
dimredTrainer->SetNumberOfHiddenNeurons(nb_neuron);
dimredTrainer->SetNumberOfIterations(GetParameterInt("model.autoencoder.nbiter"));
dimredTrainer->SetRegularization(GetParameterFloat("model.autoencoder.regularization"));
-dimredTrainer->SetRegularization(GetParameterFloat("model.autoencoder.noise"));
+dimredTrainer->SetNoise(GetParameterFloat("model.autoencoder.noise"));
dimredTrainer->SetInputListSample(trainingListSample);
std::cout << "before train" << std::endl;
dimredTrainer->Train();
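If the new sparsity parameters are meant to be set from the application, the trainer setup above would eventually need two more calls. The parameter keys below are hypothetical (they are not declared in this commit); the setter names follow from the itkSetMacro(rho,double) and itkSetMacro(beta,double) declarations added earlier:

// Hypothetical wiring of the sparsity parameters; the keys are placeholders.
dimredTrainer->Setrho(GetParameterFloat("model.autoencoder.rho"));
dimredTrainer->Setbeta(GetParameterFloat("model.autoencoder.beta"));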