
Commit 41eadc5
Revert "Use config instead."
This reverts commit 6c21e7e.
trivialfis committed Feb 1, 2021
1 parent 6c21e7e commit 41eadc5
Showing 2 changed files with 1 addition and 5 deletions.
python-package/xgboost/dask.py: 4 changes (1 addition, 3 deletions)
@@ -1009,10 +1009,8 @@ def _infer_predict_output(
     test_sample = rng.randn(1, features)
     if inplace:
         # clear the state to avoid gpu_id, gpu_predictor
-        booster_config = booster.save_config()
-        booster.set_param({"predictor": "cpu_predictor", "gpu_id": -1})
+        booster = Booster(model_file=booster.save_raw())
         test_predt = booster.inplace_predict(test_sample, **kwargs)
-        booster.load_config(booster_config)
     else:
         m = DMatrix(test_sample)
         test_predt = booster.predict(m, **kwargs)
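
For context, the restored dask.py code copies the booster through its serialized raw model so the copy used for shape inference does not carry GPU-specific settings such as gpu_id or gpu_predictor, instead of temporarily rewriting and then restoring the original booster's configuration. A minimal sketch of that idea, assuming an already trained xgboost.Booster and a hypothetical helper name infer_prediction_shape:

import numpy as np
import xgboost

def infer_prediction_shape(booster: xgboost.Booster, features: int):
    """Sketch: predict on a dummy row to learn the output shape."""
    rng = np.random.RandomState(1994)
    test_sample = rng.randn(1, features)
    # Round-tripping through the raw model bytes yields a copy that starts
    # from default (CPU) configuration rather than inheriting gpu_id or
    # gpu_predictor from the caller's booster.
    cpu_booster = xgboost.Booster(model_file=booster.save_raw())
    test_predt = cpu_booster.inplace_predict(test_sample)
    return test_predt.shape

One practical difference from the reverted config-based approach is that the raw-model copy never mutates the caller's booster, whereas the save_config/set_param/load_config sequence temporarily changes its parameters and then restores them.
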
src/learner.cc: 2 changes (0 additions, 2 deletions)
@@ -354,7 +354,6 @@ class LearnerConfiguration : public Learner {

   void LoadConfig(Json const& in) override {
     CHECK(IsA<Object>(in));
-    std::lock_guard<std::mutex> guard(config_lock_);
     Version::Load(in);

     auto const& learner_parameters = get<Object>(in["learner"]);
@@ -420,7 +419,6 @@
   }

   void SetParam(const std::string& key, const std::string& value) override {
-    std::lock_guard<std::mutex> guard(config_lock_);
     this->need_configuration_ = true;
     if (key == kEvalMetric) {
       if (std::find(metric_names_.cbegin(), metric_names_.cend(),
