/**
* Copyright 2015-2023 by XGBoost Contributors
* \file learner.h
* \brief Learner interface that integrates objective, gbm and evaluation together.
* This is the user facing XGBoost training module.
* \author Tianqi Chen
*/
#ifndef XGBOOST_LEARNER_H_
#define XGBOOST_LEARNER_H_
#include <dmlc/io.h> // for Serializable
#include <xgboost/base.h> // for bst_feature_t, bst_target_t, bst_float, Args, GradientPair, ..
#include <xgboost/context.h> // for Context
#include <xgboost/linalg.h> // for Tensor, TensorView
#include <xgboost/metric.h> // for Metric
#include <xgboost/model.h> // for Configurable, Model
#include <xgboost/span.h> // for Span
#include <xgboost/task.h> // for ObjInfo
#include <algorithm> // for max
#include <cstdint> // for int32_t, uint32_t, uint8_t
#include <map> // for map
#include <memory> // for shared_ptr, unique_ptr
#include <string> // for string
#include <utility> // for move
#include <vector> // for vector
namespace xgboost {
class FeatureMap;
class Metric;
class GradientBooster;
class ObjFunction;
class DMatrix;
class Json;
struct XGBAPIThreadLocalEntry;
template <typename T>
class HostDeviceVector;
/**
 * \brief Kind of output requested from a prediction call.
 *
 * Mirrors the boolean flags of Learner::Predict (output_margin, pred_leaf,
 * pred_contribs, approx_contribs, pred_interactions) as a single enum for
 * the in-place prediction path.
 */
enum class PredictionType : std::uint8_t {  // NOLINT
  kValue = 0,              // transformed prediction value
  kMargin = 1,             // raw margin, before the objective's transformation
  kContribution = 2,       // per-feature contributions
  kApproxContribution = 3, // approximated contributions, traded for speed
  kInteraction = 4,        // feature-pair interaction contributions
  kApproxInteraction = 5,  // approximated interaction contributions
  kLeaf = 6                // leaf index of each tree for every sample
};
/*!
 * \brief Learner class that does training and prediction.
 *  This is the user facing module of xgboost training.
 *  The Load/Save function corresponds to the model used in python/R.
 *  \code
 *
 *  std::unique_ptr<Learner> learner{Learner::Create(cache_mats)};
 *  learner->Configure(configs);
 *
 *  for (int iter = 0; iter < max_iter; ++iter) {
 *    learner->UpdateOneIter(iter, train_mat);
 *    LOG(INFO) << learner->EvalOneIter(iter, data_sets, data_names);
 *  }
 *
 *  \endcode
 */
class Learner : public Model, public Configurable, public dmlc::Serializable {
 public:
  /*! \brief virtual destructor */
  ~Learner() override;
  /*!
   * \brief Configure Learner based on set parameters.
   */
  virtual void Configure() = 0;
  /*!
   * \brief update the model for one iteration
   *  With the specified objective function.
   * \param iter current iteration number
   * \param train reference to the data matrix.
   */
  virtual void UpdateOneIter(std::int32_t iter, std::shared_ptr<DMatrix> train) = 0;
  /**
   * @brief Do customized gradient boosting with in_gpair.
   *
   * @note in_gpair can be mutated after this call.
   *
   * @param iter current iteration number
   * @param train reference to the data matrix.
   * @param in_gpair The input gradient statistics.
   */
  virtual void BoostOneIter(std::int32_t iter, std::shared_ptr<DMatrix> train,
                            linalg::Matrix<GradientPair>* in_gpair) = 0;
  /*!
   * \brief evaluate the model for specific iteration using the configured metrics.
   * \param iter iteration number
   * \param data_sets datasets to be evaluated.
   * \param data_names name of each dataset
   * \return a string corresponding to the evaluation result
   */
  virtual std::string EvalOneIter(int iter,
                                  const std::vector<std::shared_ptr<DMatrix>>& data_sets,
                                  const std::vector<std::string>& data_names) = 0;
  /*!
   * \brief get prediction given the model.
   * \param data input data
   * \param output_margin whether to only predict margin value instead of transformed prediction
   * \param out_preds output vector that stores the prediction
   * \param layer_begin Beginning of boosted tree layer used for prediction.
   * \param layer_end End of booster layer. 0 means do not limit trees.
   * \param training Whether the prediction result is used for training
   * \param pred_leaf whether to only predict the leaf index of each tree in a boosted tree predictor
   * \param pred_contribs whether to only predict the feature contributions
   * \param approx_contribs whether to approximate the feature contributions for speed
   * \param pred_interactions whether to compute the feature pair contributions
   */
  virtual void Predict(std::shared_ptr<DMatrix> data, bool output_margin,
                       HostDeviceVector<bst_float>* out_preds, bst_layer_t layer_begin,
                       bst_layer_t layer_end, bool training = false, bool pred_leaf = false,
                       bool pred_contribs = false, bool approx_contribs = false,
                       bool pred_interactions = false) = 0;
  /*!
   * \brief Inplace prediction.
   *
   * \param p_m A proxy DMatrix that contains the data and related meta info.
   * \param type Prediction type.
   * \param missing Missing value in the data.
   * \param [in,out] out_preds Pointer to output prediction vector.
   * \param layer_begin Beginning of boosted tree layer used for prediction.
   * \param layer_end End of booster layer. 0 means do not limit trees.
   */
  virtual void InplacePredict(std::shared_ptr<DMatrix> p_m, PredictionType type, float missing,
                              HostDeviceVector<float>** out_preds, bst_layer_t layer_begin,
                              bst_layer_t layer_end) = 0;
  /*!
   * \brief Calculate feature score. See doc in C API for outputs.
   */
  virtual void CalcFeatureScore(std::string const& importance_type,
                                common::Span<int32_t const> trees,
                                std::vector<bst_feature_t>* features,
                                std::vector<float>* scores) = 0;
  /*!
   * \brief Get number of boosted rounds from gradient booster.
   */
  virtual int32_t BoostedRounds() const = 0;
  /**
   * \brief Get the number of output groups from the model.
   */
  virtual std::uint32_t Groups() const = 0;
  /*! \brief Load the model from a JSON document. */
  void LoadModel(Json const& in) override = 0;
  /*! \brief Save the model to a JSON document. */
  void SaveModel(Json* out) const override = 0;
  /*! \brief Load the model from a binary stream (legacy binary format). */
  virtual void LoadModel(dmlc::Stream* fi) = 0;
  /*! \brief Save the model to a binary stream (legacy binary format). */
  virtual void SaveModel(dmlc::Stream* fo) const = 0;
  /*!
   * \brief Set multiple parameters at once.
   *
   * \param args parameters.
   */
  virtual void SetParams(Args const& args) = 0;
  /*!
   * \brief Set parameter for booster
   *
   *  The property will NOT be saved along with booster
   *
   * \param key The key of parameter
   * \param value The value of parameter
   */
  virtual void SetParam(const std::string& key, const std::string& value) = 0;
  /*!
   * \brief Get the number of features of the booster.
   * \return number of features
   */
  virtual uint32_t GetNumFeature() const = 0;
  /*!
   * \brief Set additional attribute to the Booster.
   *
   *  The property will be saved along the booster.
   *
   * \param key The key of the property.
   * \param value The value of the property.
   */
  virtual void SetAttr(const std::string& key, const std::string& value) = 0;
  /*!
   * \brief Get attribute from the booster.
   *  The property will be saved along the booster.
   * \param key The key of the attribute.
   * \param out The output value.
   * \return Whether the key exists among booster's attributes.
   */
  virtual bool GetAttr(const std::string& key, std::string* out) const = 0;
  /*!
   * \brief Delete an attribute from the booster.
   * \param key The key of the attribute.
   * \return Whether the key was found among booster's attributes.
   */
  virtual bool DelAttr(const std::string& key) = 0;
  /*!
   * \brief Get a vector of attribute names from the booster.
   * \return vector of attribute name strings.
   */
  virtual std::vector<std::string> GetAttrNames() const = 0;
  /*!
   * \brief Set the feature names for current booster.
   * \param fn Input feature names
   */
  virtual void SetFeatureNames(std::vector<std::string> const& fn) = 0;
  /*!
   * \brief Get the feature names for current booster.
   * \param fn Output feature names
   */
  virtual void GetFeatureNames(std::vector<std::string>* fn) const = 0;
  /*!
   * \brief Set the feature types for current booster.
   * \param ft Input feature types.
   */
  virtual void SetFeatureTypes(std::vector<std::string> const& ft) = 0;
  /*!
   * \brief Get the feature types for current booster.
   * \param ft Output feature types
   */
  virtual void GetFeatureTypes(std::vector<std::string>* ft) const = 0;
  /**
   * \brief Slice the model.
   *
   * See InplacePredict for layer parameters.
   *
   * \param begin Beginning of boosted tree layer to keep.
   * \param end End of booster layer. 0 means do not limit trees.
   * \param step step size between slice.
   * \param out_of_bound Return true if end layer is out of bound.
   *
   * \return a sliced model.
   */
  virtual Learner* Slice(bst_layer_t begin, bst_layer_t end, bst_layer_t step,
                         bool* out_of_bound) = 0;
  /*!
   * \brief dump the model in the requested format
   * \param fmap feature map that may help give interpretations of feature
   * \param with_stats extra statistics while dumping model
   * \param format the format to dump the model in
   * \return a vector of dump for boosters.
   */
  virtual std::vector<std::string> DumpModel(const FeatureMap& fmap,
                                             bool with_stats,
                                             std::string format) = 0;
  /*! \brief Get the thread-local entry used by the C API for staging results. */
  virtual XGBAPIThreadLocalEntry& GetThreadLocal() const = 0;
  /*!
   * \brief Create a new instance of learner.
   * \param cache_data The matrix to cache the prediction.
   * \return Created learner.
   */
  static Learner* Create(const std::vector<std::shared_ptr<DMatrix> >& cache_data);
  /**
   * \brief Return the context object of this Booster.
   */
  virtual Context const* Ctx() const = 0;
  /*!
   * \brief Get configuration arguments currently stored by the learner
   * \return Key-value pairs representing configuration arguments
   */
  virtual const std::map<std::string, std::string>& GetConfigurationArguments() const = 0;

 protected:
  /*! \brief objective function */
  std::unique_ptr<ObjFunction> obj_;
  /*! \brief The gradient booster used by the model*/
  std::unique_ptr<GradientBooster> gbm_;
  /*! \brief The evaluation metrics used to evaluate the model. */
  std::vector<std::unique_ptr<Metric> > metrics_;
  /*! \brief Training parameter. */
  Context ctx_;
};
// Legacy parameter struct kept for the binary model IO path (defined elsewhere).
struct LearnerModelParamLegacy;

/**
 * \brief Strategy for building multi-target models.
 */
enum class MultiStrategy : std::int32_t {
  kOneOutputPerTree = 0,  // one output per tree (default; scalar leaves)
  kMultiOutputTree = 1,   // trees emit all outputs at once (vector leaves)
};
/**
 * \brief Basic model parameters, used to describe the booster.
 */
struct LearnerModelParam {
 private:
  /**
   * \brief Global bias, this is just a scalar value but can be extended to vector when we
   *        support multi-class and multi-target.
   */
  linalg::Tensor<float, 1> base_score_;

 public:
  /**
   * \brief The number of features.
   */
  bst_feature_t num_feature{0};
  /**
   * \brief The number of classes or targets.
   */
  std::uint32_t num_output_group{0};
  /**
   * \brief Current task, determined by objective.
   */
  ObjInfo task{ObjInfo::kRegression};
  /**
   * \brief Strategy for building multi-target models.
   */
  MultiStrategy multi_strategy{MultiStrategy::kOneOutputPerTree};

  LearnerModelParam() = default;
  // As the old `LearnerModelParamLegacy` is still used by binary IO, we keep
  // this one as an immutable copy.
  LearnerModelParam(Context const* ctx, LearnerModelParamLegacy const& user_param,
                    linalg::Tensor<float, 1> base_margin, ObjInfo t, MultiStrategy multi_strategy);
  LearnerModelParam(LearnerModelParamLegacy const& user_param, ObjInfo t,
                    MultiStrategy multi_strategy);
  // Direct construction; num_output_group takes whichever of n_groups/n_targets
  // is larger, since a model is either multi-class or multi-target, not both.
  LearnerModelParam(bst_feature_t n_features, linalg::Tensor<float, 1> base_score,
                    std::uint32_t n_groups, bst_target_t n_targets, MultiStrategy multi_strategy)
      : base_score_{std::move(base_score)},
        num_feature{n_features},
        num_output_group{std::max(n_groups, n_targets)},
        multi_strategy{multi_strategy} {}

  // View of the global bias on the device selected by `ctx` / `device`.
  // [[nodiscard]] added for consistency with the DeviceOrd overload below.
  [[nodiscard]] linalg::TensorView<float const, 1> BaseScore(Context const* ctx) const;
  [[nodiscard]] linalg::TensorView<float const, 1> BaseScore(DeviceOrd device) const;
  // Copy all fields (including base_score_) from `that` into this object.
  void Copy(LearnerModelParam const& that);

  /* \brief Whether trees carry one value per output (vector leaf). */
  [[nodiscard]] bool IsVectorLeaf() const noexcept {
    return multi_strategy == MultiStrategy::kMultiOutputTree;
  }
  /* \brief Number of model outputs (classes or targets). */
  [[nodiscard]] bst_target_t OutputLength() const noexcept { return this->num_output_group; }
  /* \brief Size of a single leaf: all outputs for vector leaves, otherwise 1. */
  [[nodiscard]] bst_target_t LeafLength() const noexcept {
    return this->IsVectorLeaf() ? this->OutputLength() : 1;
  }
  /* \brief Whether this parameter is initialized with LearnerModelParamLegacy. */
  [[nodiscard]] bool Initialized() const noexcept {
    return num_feature != 0 && num_output_group != 0;
  }
};
} // namespace xgboost
#endif // XGBOOST_LEARNER_H_