#ifndef otbTrainBoost_hxx
#define otbTrainBoost_hxx

#include "otbLearningApplicationBase.h"
#include "otbBoostMachineLearningModel.h"

namespace otb
{
namespace Wrapper
{
template <class TInputValue, class TOutputValue>
void LearningApplicationBase<TInputValue, TOutputValue>::InitBoostParams()
{
  AddChoice("classifier.boost", "Boost classifier");
  SetParameterDescription("classifier.boost", "http://docs.opencv.org/modules/ml/doc/boosting.html");

  // Boosting type: the Choice parameter is declared before its individual choices.
  AddParameter(ParameterType_Choice, "classifier.boost.t", "Boost Type");
  AddChoice("classifier.boost.t.discrete", "Discrete AdaBoost");
  SetParameterDescription("classifier.boost.t.discrete",
                          "This procedure trains the classifiers on weighted versions of the training "
                          "sample, giving higher weight to cases that are currently misclassified. "
                          "This is done for a sequence of weighted samples, and then the final "
                          "classifier is defined as a linear combination of the classifiers from each stage.");
  AddChoice("classifier.boost.t.real",
            "Real AdaBoost (technique using confidence-rated predictions "
            "and working well with categorical data)");
  SetParameterDescription("classifier.boost.t.real",
                          "Adaptation of the Discrete AdaBoost algorithm to real-valued confidence predictions.");
  AddChoice("classifier.boost.t.logit", "LogitBoost (technique producing good regression fits)");
  SetParameterDescription("classifier.boost.t.logit",
                          "This procedure is an adaptive Newton algorithm for fitting an additive "
                          "logistic regression model. Beware that it can produce numeric instability.");
  AddChoice("classifier.boost.t.gentle",
            "Gentle AdaBoost (technique setting less weight on outlier data points "
            "and, for that reason, often performing well on regression data)");
  SetParameterDescription("classifier.boost.t.gentle",
                          "A modified version of the Real AdaBoost algorithm, using Newton stepping "
                          "rather than exact optimization at each step.");
  SetParameterString("classifier.boost.t", "real");
  SetParameterDescription("classifier.boost.t", "Type of Boosting algorithm.");
  // Number of weak classifiers
  AddParameter(ParameterType_Int, "classifier.boost.w", "Weak count");
  SetParameterInt("classifier.boost.w", 100);
  SetParameterDescription("classifier.boost.w", "The number of weak classifiers.");
  // Weight trim rate
  AddParameter(ParameterType_Float, "classifier.boost.r", "Weight Trim Rate");
  SetParameterFloat("classifier.boost.r", 0.95);
  SetParameterDescription("classifier.boost.r",
                          "A threshold between 0 and 1 used to save computational time. "
                          "Samples with summary weight <= (1 - weight_trim_rate) do not participate in "
                          "the next iteration of training. Set this parameter to 0 to turn off this "
                          "functionality.");
  // Maximum depth of each weak tree
  AddParameter(ParameterType_Int, "classifier.boost.m", "Maximum depth of the tree");
  SetParameterInt("classifier.boost.m", 1);
  SetParameterDescription("classifier.boost.m", "Maximum depth of the tree.");
}
template <class TInputValue, class TOutputValue>
void LearningApplicationBase<TInputValue, TOutputValue>::TrainBoost(typename ListSampleType::Pointer trainingListSample,
                                                                    typename TargetListSampleType::Pointer trainingLabeledListSample,
                                                                    std::string modelPath)
{
  typename BoostType::Pointer boostClassifier = BoostType::New();
  boostClassifier->SetRegressionMode(this->m_RegressionFlag);
  boostClassifier->SetInputListSample(trainingListSample);
  boostClassifier->SetTargetListSample(trainingLabeledListSample);

  // Forward the application parameters to the machine learning model
  boostClassifier->SetBoostType(GetParameterInt("classifier.boost.t"));
  boostClassifier->SetWeakCount(GetParameterInt("classifier.boost.w"));
  boostClassifier->SetWeightTrimRate(GetParameterFloat("classifier.boost.r"));
  boostClassifier->SetMaxDepth(GetParameterInt("classifier.boost.m"));
  boostClassifier->Train();
  boostClassifier->Save(modelPath);
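  // Illustrative follow-up (a sketch under assumed API names): the file written by Save()
  // can later be reloaded through the corresponding model class, along the lines of
  //   auto model = otb::BoostMachineLearningModel<TInputValue, TOutputValue>::New();
  //   model->Load(modelPath);
  //   // model->Predict(sample) would then classify a single feature vector.
  // These calls follow the usual OTB MachineLearningModel interface and should be checked
  // against the installed OTB version.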