#ifndef otbAutoencoderModel_hxx
#define otbAutoencoderModel_hxx

#include <fstream>
#include <cmath>
#include <algorithm>
#include <vector>
#include <string>

// The Shark headers trigger warnings we cannot fix; silence them locally.
#if defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic push
#if (defined(__GNUC__) && (__GNUC__ >= 9)) || (defined(__clang__) && (__clang_major__ >= 10))
#pragma GCC diagnostic ignored "-Wdeprecated-copy"
#endif
#pragma GCC diagnostic ignored "-Wshadow"
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma GCC diagnostic ignored "-Woverloaded-virtual"
#endif
#include "otbSharkUtils.h"
#include <shark/ObjectiveFunctions/ErrorFunction.h>
#include <shark/Algorithms/GradientDescent/Rprop.h>
#include <shark/ObjectiveFunctions/Loss/SquaredLoss.h>
#include <shark/ObjectiveFunctions/Regularizer.h>
#include <shark/Algorithms/StoppingCriteria/MaxIterations.h>
#include <shark/Algorithms/StoppingCriteria/TrainingProgress.h>
#include <shark/Algorithms/GradientDescent/Adam.h>
#if defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic pop
#endif
60 template <
class TInputValue,
class NeuronType>
63 this->m_IsDoPredictBatchMultiThreaded =
true;
64 this->m_WriteLearningCurve =
false;
67 template <
class TInputValue,
class NeuronType>
72 template <
class TInputValue,
class NeuronType>
75 std::vector<shark::RealVector> features;
76 Shark::ListSampleToSharkVector(this->GetInputListSample(), features);
77 shark::Data<shark::RealVector> inputSamples = shark::createDataFromRange(features);
78 shark::Data<shark::RealVector> inputSamples_copy = inputSamples;
81 if (this->m_WriteLearningCurve ==
true)
83 ofs.open(m_LearningCurveFileName);
84 ofs <<
"learning curve" << std::endl;
90 size_t previousShape = shark::dataDimension(inputSamples);
91 for (
unsigned int i = 0; i < m_NumberOfHiddenNeurons.Size(); ++i)
93 m_InLayers.push_back(
LayerType(previousShape, m_NumberOfHiddenNeurons[i]));
94 previousShape = m_NumberOfHiddenNeurons[i];
95 m_Encoder.add(&(m_InLayers.back()),
true);
97 for (
unsigned int i = std::max(0,
static_cast<int>(m_NumberOfHiddenNeurons.Size() - 1)); i > 0; --i)
99 m_InLayers.push_back(
LayerType(previousShape, m_NumberOfHiddenNeurons[i - 1]));
100 previousShape = m_NumberOfHiddenNeurons[i - 1];
102 m_OutLayer =
OutLayerType(previousShape, shark::dataDimension(inputSamples));
105 for (
unsigned int i = 0; i < m_NumberOfHiddenNeurons.Size(); ++i)
109 shark::TrainingProgress<> criterion(5, m_Epsilon);
113 TrainOneLayer(criterion, i, inputSamples, ofs);
117 TrainOneSparseLayer(criterion, i, inputSamples, ofs);
123 shark::MaxIterations<> criterion(m_NumberOfIterations);
127 TrainOneLayer(criterion, i, inputSamples, ofs);
132 TrainOneSparseLayer(criterion, i, inputSamples, ofs);
137 inputSamples = m_InLayers[i](inputSamples);
139 if (m_NumberOfIterationsFineTuning > 0)
141 shark::MaxIterations<> criterion(m_NumberOfIterationsFineTuning);
142 TrainNetwork(criterion, inputSamples_copy, ofs);
144 this->SetDimension(m_NumberOfHiddenNeurons[m_NumberOfHiddenNeurons.Size() - 1]);
147 template <
class TInputValue,
class NeuronType>
150 shark::Data<shark::RealVector>& samples, std::ostream& File)
152 typedef shark::AbstractModel<shark::RealVector, shark::RealVector> BaseModelType;
154 net.add(&(m_InLayers[layer_index]),
true);
155 net.add((layer_index ? (BaseModelType*)&(m_InLayers[m_NumberOfHiddenNeurons.Size() * 2 - 1 - layer_index]) : (BaseModelType*)&m_OutLayer),
true);
158 std::size_t inputs = dataDimension(samples);
159 initRandomUniform(net, -m_InitFactor * std::sqrt(1.0 / inputs), m_InitFactor * std::sqrt(1.0 / inputs));
163 shark::LabeledData<shark::RealVector, shark::RealVector> trainSet(samples, samples);
164 shark::SquaredLoss<shark::RealVector> loss;
166 shark::ErrorFunction<> error(trainSet, &net, &loss);
168 shark::TwoNormRegularizer<> regularizer(error.numberOfVariables());
169 error.setRegularizer(m_Regularization[layer_index], ®ularizer);
171 shark::Adam<> optimizer;
173 optimizer.init(error);
175 otbMsgDevMacro(<<
"Error before training : " << optimizer.solution().value);
176 if (this->m_WriteLearningCurve ==
true)
178 File <<
"end layer" << std::endl;
185 optimizer.step(error);
186 if (this->m_WriteLearningCurve ==
true)
188 File << optimizer.solution().value << std::endl;
190 otbMsgDevMacro(<<
"Error after " << i <<
" iterations : " << optimizer.solution().value);
191 }
while (!criterion.stop(optimizer.solution()));
193 net.setParameterVector(optimizer.solution().point);
196 template <
class TInputValue,
class NeuronType>
199 shark::Data<shark::RealVector>& samples, std::ostream& File)
201 typedef shark::AbstractModel<shark::RealVector, shark::RealVector> BaseModelType;
203 net.add(&(m_InLayers[layer_index]),
true);
204 net.add((layer_index ? (BaseModelType*)&(m_InLayers[m_NumberOfHiddenNeurons.Size() * 2 - 1 - layer_index]) : (BaseModelType*)&m_OutLayer),
true);
206 std::size_t inputs = dataDimension(samples);
207 shark::initRandomUniform(net, -m_InitFactor * std::sqrt(1.0 / inputs), m_InitFactor * std::sqrt(1.0 / inputs));
211 shark::LabeledData<shark::RealVector, shark::RealVector> trainSet(samples, samples);
212 shark::SquaredLoss<shark::RealVector> loss;
215 shark::ErrorFunction<> error(trainSet, &net, &loss);
217 shark::TwoNormRegularizer<> regularizer(error.numberOfVariables());
218 error.setRegularizer(m_Regularization[layer_index], ®ularizer);
219 shark::Adam<> optimizer;
221 optimizer.init(error);
223 otbMsgDevMacro(<<
"Error before training : " << optimizer.solution().value);
228 optimizer.step(error);
229 otbMsgDevMacro(<<
"Error after " << i <<
" iterations : " << optimizer.solution().value);
230 if (this->m_WriteLearningCurve ==
true)
232 File << optimizer.solution().value << std::endl;
234 }
while (!criterion.stop(optimizer.solution()));
235 if (this->m_WriteLearningCurve ==
true)
237 File <<
"end layer" << std::endl;
239 net.setParameterVector(optimizer.solution().point);
242 template <
class TInputValue,
class NeuronType>
249 for (
auto& layer : m_InLayers)
251 net.add(&layer,
true);
253 net.add(&m_OutLayer,
true);
256 shark::LabeledData<shark::RealVector, shark::RealVector> trainSet(samples, samples);
257 shark::SquaredLoss<shark::RealVector> loss;
259 shark::ErrorFunction<> error(trainSet, &net, &loss);
260 shark::TwoNormRegularizer<> regularizer(error.numberOfVariables());
261 error.setRegularizer(m_Regularization[0], ®ularizer);
263 shark::Adam<> optimizer;
265 optimizer.init(error);
266 otbMsgDevMacro(<<
"Error before training : " << optimizer.solution().value);
268 while (!criterion.stop(optimizer.solution()))
271 optimizer.step(error);
272 otbMsgDevMacro(<<
"Error after " << i <<
" iterations : " << optimizer.solution().value);
273 if (this->m_WriteLearningCurve ==
true)
275 File << optimizer.solution().value << std::endl;
280 template <
class TInputValue,
class NeuronType>
285 this->Load(filename);
294 template <
class TInputValue,
class NeuronType>
300 template <
class TInputValue,
class NeuronType>
304 std::ofstream ofs(filename);
305 ofs <<
"Autoencoder" << std::endl;
306 ofs << (m_InLayers.size() + 1) << std::endl;
307 shark::TextOutArchive oa(ofs);
308 for (
const auto& layer : m_InLayers)
316 template <
class TInputValue,
class NeuronType>
319 std::ifstream ifs(filename);
322 ifs.getline(buffer, 256);
323 std::string bufferStr(buffer);
324 if (bufferStr !=
"Autoencoder")
326 itkExceptionMacro(<<
"Error opening " << filename.c_str());
329 ifs.getline(buffer, 256);
330 int nbLevels = boost::lexical_cast<int>(buffer);
331 if (nbLevels < 2 || nbLevels % 2 == 1)
333 itkExceptionMacro(<<
"Unexpected number of levels : " << buffer);
337 shark::TextInArchive ia(ifs);
338 for (
int i = 0; (i + 1) < nbLevels; i++)
342 m_InLayers.push_back(layer);
347 for (
int i = 0; i < nbLevels / 2; i++)
349 m_Encoder.add(&(m_InLayers[i]),
true);
352 this->SetDimension(m_Encoder.outputShape()[0]);
355 template <
class TInputValue,
class NeuronType>
359 shark::RealVector samples(value.Size());
360 for (
size_t i = 0; i < value.Size(); i++)
362 samples[i] = value[i];
365 std::vector<shark::RealVector> features;
366 features.push_back(samples);
368 shark::Data<shark::RealVector> data = shark::createDataFromRange(features);
371 data = m_Encoder(data);
373 target.SetSize(this->m_Dimension);
375 for (
unsigned int a = 0; a < this->m_Dimension; ++a)
377 target[a] = data.element(0)[a];
382 template <
class TInputValue,
class NeuronType>
387 std::vector<shark::RealVector> features;
388 Shark::ListSampleRangeToSharkVector(input, features, startIndex, size);
389 shark::Data<shark::RealVector> data = shark::createDataFromRange(features);
392 data = m_Encoder(data);
394 unsigned int id = startIndex;
395 target.SetSize(this->m_Dimension);
397 for (
const auto& p : data.elements())
399 for (
unsigned int a = 0; a < this->m_Dimension; ++a)
403 targets->SetMeasurementVector(
id, target);