From 4fc513b07a72e59e082929604ff9563f799a9ae6 Mon Sep 17 00:00:00 2001
From: sneaxiy
Date: Mon, 22 Jun 2020 02:28:18 +0000
Subject: [PATCH] remove XGBoostObjectiveJSON

---
 pkg/attribute/attribute.go                |  6 ----
 pkg/attribute/xgboost_objective_params.go | 34 +++++++++++------------
 2 files changed, 16 insertions(+), 24 deletions(-)

diff --git a/pkg/attribute/attribute.go b/pkg/attribute/attribute.go
index a62f5f10b2..c79548aded 100644
--- a/pkg/attribute/attribute.go
+++ b/pkg/attribute/attribute.go
@@ -372,9 +372,6 @@ var extractSymbolOnce sync.Once
 // OptimizerParamsDocs stores parameters and documents of optimizers
 var OptimizerParamsDocs map[string]map[string]string
 
-// XGBoostObjectiveDocs stores options for xgboost objective
-var XGBoostObjectiveDocs map[string]string
-
 // ExtractSymbol extracts parameter documents of Python modules from doc strings
 func ExtractSymbol(module ...string) {
 	cmd := exec.Command("python", "-uc", fmt.Sprintf("__import__('symbol_extractor').print_param_doc('%s')", strings.Join(module, "', '")))
@@ -409,8 +406,5 @@ func init() {
 	if err := json.Unmarshal([]byte(OptimizerParameterJSON), &OptimizerParamsDocs); err != nil {
 		panic(err) // assertion
 	}
-	if err := json.Unmarshal([]byte(XGBoostObjectiveJSON), &XGBoostObjectiveDocs); err != nil {
-		panic(err)
-	}
 	removeUnnecessaryParams()
 }
diff --git a/pkg/attribute/xgboost_objective_params.go b/pkg/attribute/xgboost_objective_params.go
index afd28559b9..66161127de 100644
--- a/pkg/attribute/xgboost_objective_params.go
+++ b/pkg/attribute/xgboost_objective_params.go
@@ -13,23 +13,21 @@
 
 package attribute
 
-// XGBoostObjectiveJSON is xgboost objective param json extracted
+// XGBoostObjectiveDocs is xgboost objective parameter docs extracted
 // from https://xgboost.readthedocs.io/en/latest/parameter.html
-const XGBoostObjectiveJSON = `
-{
-	"binary:hinge": "hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities.",
-	"binary:logistic": "logistic regression for binary classification, output probability",
-	"binary:logitraw": "logistic regression for binary classification, output score before logistic transformation",
-	"multi:softmax": "set XGBoost to do multiclass classification using the softmax objective, you also need to set num_class(number of classes)",
-	"multi:softprob": "same as softmax, but output a vector of ndata * nclass, which can be further reshaped to ndata * nclass matrix. The result contains predicted probability of each data point belonging to each class.",
-	"rank:map": "Use LambdaMART to perform list-wise ranking where Mean Average Precision (MAP) is maximized",
-	"rank:ndcg": "Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized",
-	"rank:pairwise": "Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized",
-	"reg:gamma": "gamma regression with log-link. Output is a mean of gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be gamma-distributed.",
-	"reg:logistic": "logistic regression",
-	"reg:squarederror": "regression with squared loss.",
-	"reg:squaredlogerror": "regression with squared log loss 1/2[log(pred+1)\u2212log(label+1)]^2",
-	"reg:tweedie": "Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be Tweedie-distributed.",
-	"survival:cox": "Cox regression for right censored survival time data (negative values are considered right censored). Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function h(t) = h0(t) * HR)."
+var XGBoostObjectiveDocs = map[string]string{
+	"binary:hinge": "hinge loss for binary classification. This makes predictions of 0 or 1, rather than producing probabilities.",
+	"binary:logistic": "logistic regression for binary classification, output probability",
+	"binary:logitraw": "logistic regression for binary classification, output score before logistic transformation",
+	"multi:softmax": "set XGBoost to do multiclass classification using the softmax objective, you also need to set num_class(number of classes)",
+	"multi:softprob": "same as softmax, but output a vector of ndata * nclass, which can be further reshaped to ndata * nclass matrix. The result contains predicted probability of each data point belonging to each class.",
+	"rank:map": "Use LambdaMART to perform list-wise ranking where Mean Average Precision (MAP) is maximized",
+	"rank:ndcg": "Use LambdaMART to perform list-wise ranking where Normalized Discounted Cumulative Gain (NDCG) is maximized",
+	"rank:pairwise": "Use LambdaMART to perform pairwise ranking where the pairwise loss is minimized",
+	"reg:gamma": "gamma regression with log-link. Output is a mean of gamma distribution. It might be useful, e.g., for modeling insurance claims severity, or for any outcome that might be gamma-distributed.",
+	"reg:logistic": "logistic regression",
+	"reg:squarederror": "regression with squared loss.",
+	"reg:squaredlogerror": "regression with squared log loss 1/2[log(pred+1)\u2212log(label+1)]^2",
+	"reg:tweedie": "Tweedie regression with log-link. It might be useful, e.g., for modeling total loss in insurance, or for any outcome that might be Tweedie-distributed.",
+	"survival:cox": "Cox regression for right censored survival time data (negative values are considered right censored). Note that predictions are returned on the hazard ratio scale (i.e., as HR = exp(marginal_prediction) in the proportional hazard function h(t) = h0(t) * HR).",
 }
-`
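
A minimal usage sketch for reviewers, not part of the patch: with XGBoostObjectiveDocs now a plain package-level map, callers look up an objective's documentation directly and the init()-time JSON unmarshalling step goes away. The import path below assumes SQLFlow's module path is sqlflow.org/sqlflow.

    package main

    import (
    	"fmt"

    	"sqlflow.org/sqlflow/pkg/attribute" // assumed module path
    )

    func main() {
    	// Look up the doc string for one objective; ok is false (and doc is "")
    	// for objectives that are not in the map.
    	if doc, ok := attribute.XGBoostObjectiveDocs["reg:squarederror"]; ok {
    		fmt.Println(doc)
    	}
    }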