Fix [R-package] Prevent remembering parameters (#799)
* Revert "[R-package] Prevent remembering parameters (#796)"

This reverts commit c795e2c.

* Use model load/unload trick for freeing up memory (see the sketch after this list)

* Missing comma

* Hand copy manual update (1/2)

* Hand copy manual update (2/2)
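
For context, the "model load/unload trick" mentioned in the list above serializes the trained booster to a model string and loads it back, so the reloaded booster keeps no handle to the training Dataset and that memory can be reclaimed. A minimal sketch of the idea in R, assuming an already trained lgb.Booster named bst (the object names here are illustrative, not part of the commit):

# Assumes `bst` is a trained lgb.Booster, e.g. the return value of lgb.train()
library(lightgbm)

# Serialize the model to a string, then load it back as a fresh booster;
# the reloaded booster no longer references the original lgb.Dataset,
# so that memory can be garbage collected
model_string <- bst$save_model_to_string()
bst <- lgb.load(model_str = model_string)

# Fields such as best_iter, best_score and record_evals are not carried over
# by the reload, which is why the commit copies them back by hand
# (see the lgb.train.R diff below)
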
Laurae2 authored and guolinke committed Aug 18, 2017
1 parent 4e9b589 commit 3ec345e
Showing 4 changed files with 27 additions and 14 deletions.
8 changes: 1 addition & 7 deletions R-package/R/lgb.Dataset.R
@@ -1,11 +1,8 @@
 Dataset <- R6Class(
   classname = "lgb.Dataset",
-  cloneable = TRUE,
+  cloneable = FALSE,
   public = list(
 
-    # Logical to check whether a dataset can be used re-modeled in-memory as another Dataset or not
-    remodel = TRUE,
-
     # Finalize will free up the handles
     finalize = function() {
 
@@ -279,9 +276,6 @@ Dataset <- R6Class(
       stop("lgb.Dataset.construct: label should be set")
     }
 
-    # Forcefully block construction
-    self$remodel <- FALSE
-
     # Return self
     return(invisible(self))
 
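
A side note on the cloneable = FALSE line above: with cloneable = FALSE, R6 does not generate a clone() method for lgb.Dataset objects, which matches the removal of the clone-based construction path in lgb.train.R below. A toy illustration with a throwaway R6 class (not LightGBM code):

library(R6)

# Throwaway class mirroring the cloneable = FALSE setting (illustration only)
NotCloneable <- R6Class("NotCloneable",
                        cloneable = FALSE,
                        public = list(x = 1))

obj <- NotCloneable$new()
is.null(obj$clone)  # TRUE: no clone() method was generated
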
25 changes: 20 additions & 5 deletions R-package/R/lgb.train.R
@@ -27,6 +27,7 @@
 #' If there's more than one, will check all of them
 #' Returns the model with (best_iter + early_stopping_rounds)
 #' If early stopping occurs, the model will have 'best_iter' field
+#' @param reset_data Boolean, setting it to TRUE (not the default value) will transform the booster model into a predictor model which frees up memory and the original datasets
 #' @param callbacks list of callback functions
 #' List of callback functions that are applied at each iteration.
 #' @param ... other parameters, see parameters.md for more informations
@@ -70,6 +71,7 @@ lgb.train <- function(params = list(),
                       categorical_feature = NULL,
                       early_stopping_rounds = NULL,
                       callbacks = list(),
+                      reset_data = FALSE,
                       ...) {
 
   # Setup temporary variables
@@ -153,10 +155,7 @@ lgb.train <- function(params = list(),
   }
 
   # Construct datasets, if needed
-  if (data$remodel == TRUE) {
-    data <- data$clone(deep = FALSE)
-    data$construct()
-  }
+  data$construct()
   vaild_contain_train <- FALSE
   train_data_name <- "train"
   reduced_valid_sets <- list()
@@ -218,7 +217,7 @@
   env$model <- booster
   env$begin_iteration <- begin_iteration
   env$end_iteration <- end_iteration
 
   # Start training model using number of iterations to start and end with
   for (i in seq(from = begin_iteration, to = end_iteration)) {
 
@@ -262,6 +261,22 @@
 
   }
 
+  # Check for booster model conversion to predictor model
+  if (reset_data) {
+
+    # Store temporarily model data elsewhere
+    booster_old <- list(best_iter = booster$best_iter,
+                        best_score = booster$best_score,
+                        record_evals = booster$record_evals)
+
+    # Reload model
+    booster <- lgb.load(model_str = booster$save_model_to_string())
+    booster$best_iter <- booster_old$best_iter
+    booster$best_score <- booster_old$best_score
+    booster$record_evals <- booster_old$record_evals
+
+  }
+
   # Return booster
   return(booster)
 
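
With the reset_data argument added above, a caller can ask lgb.train() to hand back a booster that has been reloaded from its serialized form, keeping best_iter, best_score and record_evals while letting the constructed datasets be freed. A usage sketch with made-up data (the matrix, labels and parameter values are illustrative only):

library(lightgbm)

# Made-up training data for illustration
train_matrix <- matrix(rnorm(1000 * 10), ncol = 10)
train_label  <- sample(c(0L, 1L), size = 1000, replace = TRUE)
dtrain <- lgb.Dataset(train_matrix, label = train_label)

params <- list(objective = "binary", learning_rate = 0.1)

# reset_data = TRUE: after training, the booster is reloaded from its
# model string so the original datasets can be released
bst <- lgb.train(params = params,
                 data = dtrain,
                 nrounds = 10L,
                 reset_data = TRUE)

# The returned booster still predicts as usual
preds <- predict(bst, train_matrix)
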
5 changes: 4 additions & 1 deletion R-package/man/lgb.train.Rd

(Generated file; diff not rendered.)

3 changes: 2 additions & 1 deletion R-package/man/predict.lgb.Booster.Rd

(Generated file; diff not rendered.)
