fixed lint

StrikerRUS committed Jul 25, 2019
1 parent ee28ea3 commit e9a3b33
Showing 10 changed files with 144 additions and 143 deletions.
2 changes: 1 addition & 1 deletion include/LightGBM/config.h
@@ -447,7 +447,7 @@ struct Config {
// default = None
// desc = max number of bins for each feature
// desc = if not specified, will use ``max_bin`` for all features
-std::vector<int32_t> max_bin_by_feature;
+std::vector<int32_t> max_bin_by_feature;

// check = >0
// desc = minimal number of data inside one bin
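
For context, the doc comment above describes a per-feature bin cap with a global fallback, and the src/io/dataset_loader.cpp hunks later in this commit branch on exactly that emptiness check. A minimal stand-alone sketch of the lookup semantics (the helper name is hypothetical, not LightGBM API):

    #include <cstdint>
    #include <vector>

    // If max_bin_by_feature was not specified (empty), every feature falls
    // back to the global max_bin; otherwise each feature gets its own cap.
    int32_t ResolveMaxBin(const std::vector<int32_t>& max_bin_by_feature,
                          int feature_index, int32_t max_bin) {
      if (max_bin_by_feature.empty()) {
        return max_bin;
      }
      return max_bin_by_feature[feature_index];
    }
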
2 changes: 1 addition & 1 deletion include/LightGBM/tree.h
@@ -405,7 +405,7 @@ class Tree {
};

inline void Tree::Split(int leaf, int feature, int real_feature,
-double left_value, double right_value, int left_cnt, int right_cnt,
+double left_value, double right_value, int left_cnt, int right_cnt,
double left_weight, double right_weight, float gain) {
int new_node_idx = num_leaves_ - 1;
// update parent info
4 changes: 2 additions & 2 deletions src/boosting/gbdt.cpp
@@ -136,7 +136,7 @@ void GBDT::AddValidDataset(const Dataset* valid_data,
valid_metrics_.back().push_back(metric);
}
valid_metrics_.back().shrink_to_fit();
-
+
if (early_stopping_round_ > 0) {
auto num_metrics = valid_metrics.size();
if (es_first_metric_only_) { num_metrics = 1; }
@@ -739,7 +739,7 @@ void GBDT::ResetBaggingConfig(const Config* config, bool is_change_dataset) {
}
if (balance_bagging_cond) {
balanced_bagging_ = true;
-bag_data_cnt_ = static_cast<data_size_t>(num_pos_data * config->pos_bagging_fraction)
+bag_data_cnt_ = static_cast<data_size_t>(num_pos_data * config->pos_bagging_fraction)
+ static_cast<data_size_t>((num_data_ - num_pos_data) * config->neg_bagging_fraction);
} else {
bag_data_cnt_ = static_cast<data_size_t>(config->bagging_fraction * num_data_);
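
The balanced-bagging branch above sizes the bag from two class-specific fractions instead of the single bagging_fraction. A small worked example of that arithmetic (the counts and fractions below are made up):

    #include <cstdint>
    #include <iostream>

    int main() {
      // Hypothetical dataset: 1000 positives out of 5000 rows.
      int32_t num_data = 5000, num_pos_data = 1000;
      double pos_bagging_fraction = 0.5, neg_bagging_fraction = 0.25;
      // Same formula as the line above:
      // floor(1000 * 0.5) + floor(4000 * 0.25) = 500 + 1000 = 1500 rows per bag.
      int32_t bag_data_cnt =
          static_cast<int32_t>(num_pos_data * pos_bagging_fraction)
          + static_cast<int32_t>((num_data - num_pos_data) * neg_bagging_fraction);
      std::cout << bag_data_cnt << std::endl;  // prints 1500
      return 0;
    }
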
70 changes: 33 additions & 37 deletions src/c_api.cpp
@@ -711,11 +711,11 @@ int LGBM_DatasetCreateFromCSR(const void* indptr,
}

int LGBM_DatasetCreateFromCSRFunc(void* get_row_funptr,
-int num_rows,
-int64_t num_col,
-const char* parameters,
-const DatasetHandle reference,
-DatasetHandle* out) {
+int num_rows,
+int64_t num_col,
+const char* parameters,
+const DatasetHandle reference,
+DatasetHandle* out) {
API_BEGIN();

auto get_row_fun = *static_cast<std::function<void(int idx, std::vector<std::pair<int, double>>&)>*>(get_row_funptr);
@@ -767,10 +767,9 @@ int LGBM_DatasetCreateFromCSRFunc(void* get_row_funptr,
for (int i = 0; i < num_rows; ++i) {
OMP_LOOP_EX_BEGIN();
{
-const int tid = omp_get_thread_num();
-get_row_fun(i, threadBuffer);
-
-ret->PushOneRow(tid, i, threadBuffer);
+const int tid = omp_get_thread_num();
+get_row_fun(i, threadBuffer);
+ret->PushOneRow(tid, i, threadBuffer);
}
OMP_LOOP_EX_END();
}
@@ -1291,19 +1290,19 @@ int LGBM_BoosterPredictForCSR(BoosterHandle handle,
}

int LGBM_BoosterPredictForCSRSingleRow(BoosterHandle handle,
-const void* indptr,
-int indptr_type,
-const int32_t* indices,
-const void* data,
-int data_type,
-int64_t nindptr,
-int64_t nelem,
-int64_t,
-int predict_type,
-int num_iteration,
-const char* parameter,
-int64_t* out_len,
-double* out_result) {
+const void* indptr,
+int indptr_type,
+const int32_t* indices,
+const void* data,
+int data_type,
+int64_t nindptr,
+int64_t nelem,
+int64_t,
+int predict_type,
+int num_iteration,
+const char* parameter,
+int64_t* out_len,
+double* out_result) {
API_BEGIN();
auto param = Config::Str2Map(parameter);
Config config;
@@ -1313,8 +1312,7 @@ int LGBM_BoosterPredictForCSRSingleRow(BoosterHandle handle,
}
Booster* ref_booster = reinterpret_cast<Booster*>(handle);
auto get_row_fun = RowFunctionFromCSR(indptr, indptr_type, indices, data, data_type, nindptr, nelem);
-ref_booster->PredictSingleRow(num_iteration, predict_type, get_row_fun,
-config, out_result, out_len);
+ref_booster->PredictSingleRow(num_iteration, predict_type, get_row_fun, config, out_result, out_len);
API_END();
}
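
For reference, a usage sketch of the single-row CSR entry point above, assuming a trained single-output model saved as "model.txt" with 10 features (the file name and feature layout are made up, and error handling is reduced to the return code):

    #include <cstdint>
    #include <LightGBM/c_api.h>

    int PredictOneRow(double* out_prediction) {
      BoosterHandle booster;
      int num_iterations;
      if (LGBM_BoosterCreateFromModelfile("model.txt", &num_iterations, &booster) != 0) {
        return -1;
      }
      // One CSR row with two non-zero features (column indices 1 and 7).
      int32_t indptr[] = {0, 2};
      int32_t indices[] = {1, 7};
      double values[] = {0.5, 1.25};
      int64_t out_len = 0;
      int err = LGBM_BoosterPredictForCSRSingleRow(
          booster, indptr, C_API_DTYPE_INT32, indices, values, C_API_DTYPE_FLOAT64,
          /*nindptr=*/2, /*nelem=*/2, /*num_col=*/10,
          C_API_PREDICT_NORMAL, /*num_iteration=*/-1, "", &out_len, out_prediction);
      LGBM_BoosterFree(booster);  // free the booster whether or not predict succeeded
      return err;
    }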

@@ -1397,15 +1395,15 @@ int LGBM_BoosterPredictForMat(BoosterHandle handle,
}

int LGBM_BoosterPredictForMatSingleRow(BoosterHandle handle,
-const void* data,
-int data_type,
-int32_t ncol,
-int is_row_major,
-int predict_type,
-int num_iteration,
-const char* parameter,
-int64_t* out_len,
-double* out_result) {
+const void* data,
+int data_type,
+int32_t ncol,
+int is_row_major,
+int predict_type,
+int num_iteration,
+const char* parameter,
+int64_t* out_len,
+double* out_result) {
API_BEGIN();
auto param = Config::Str2Map(parameter);
Config config;
@@ -1415,8 +1413,7 @@ int LGBM_BoosterPredictForMatSingleRow(BoosterHandle handle,
}
Booster* ref_booster = reinterpret_cast<Booster*>(handle);
auto get_row_fun = RowPairFunctionFromDenseMatric(data, 1, ncol, data_type, is_row_major);
-ref_booster->PredictSingleRow(num_iteration, predict_type, get_row_fun,
-config, out_result, out_len);
+ref_booster->PredictSingleRow(num_iteration, predict_type, get_row_fun, config, out_result, out_len);
API_END();
}

@@ -1440,8 +1437,7 @@ int LGBM_BoosterPredictForMats(BoosterHandle handle,
}
Booster* ref_booster = reinterpret_cast<Booster*>(handle);
auto get_row_fun = RowPairFunctionFromDenseRows(data, ncol, data_type);
-ref_booster->Predict(num_iteration, predict_type, nrow, get_row_fun,
-config, out_result, out_len);
+ref_booster->Predict(num_iteration, predict_type, nrow, get_row_fun, config, out_result, out_len);
API_END();
}

2 changes: 1 addition & 1 deletion src/io/config.cpp
@@ -172,7 +172,7 @@ void Config::Set(const std::unordered_map<std::string, std::string>& params) {
GetTreeLearnerType(params, &tree_learner);

GetMembersFromString(params);
-
+
// sort eval_at
std::sort(eval_at.begin(), eval_at.end());

36 changes: 18 additions & 18 deletions src/io/dataset_loader.cpp
@@ -584,13 +584,13 @@ Dataset* DatasetLoader::CostructFromSampleData(double** sample_values,
bin_mappers[i].reset(new BinMapper());
if (config_.max_bin_by_feature.empty()) {
bin_mappers[i]->FindBin(sample_values[i], num_per_col[i], total_sample_size,
-config_.max_bin, config_.min_data_in_bin, filter_cnt,
+config_.max_bin, config_.min_data_in_bin, filter_cnt,
bin_type, config_.use_missing, config_.zero_as_missing);
} else {
bin_mappers[i]->FindBin(sample_values[i], num_per_col[i], total_sample_size,
-config_.max_bin_by_feature[i], config_.min_data_in_bin,
-filter_cnt, bin_type, config_.use_missing,
-config_.zero_as_missing);
+config_.max_bin_by_feature[i], config_.min_data_in_bin,
+filter_cnt, bin_type, config_.use_missing,
+config_.zero_as_missing);
}
OMP_LOOP_EX_END();
}
@@ -628,13 +628,13 @@ }
}
bin_mappers[i].reset(new BinMapper());
if (config_.max_bin_by_feature.empty()) {
-bin_mappers[i]->FindBin(sample_values[start[rank] + i], num_per_col[start[rank] + i],
-total_sample_size, config_.max_bin, config_.min_data_in_bin,
+bin_mappers[i]->FindBin(sample_values[start[rank] + i], num_per_col[start[rank] + i],
+total_sample_size, config_.max_bin, config_.min_data_in_bin,
filter_cnt, bin_type, config_.use_missing, config_.zero_as_missing);
} else {
-bin_mappers[i]->FindBin(sample_values[start[rank] + i], num_per_col[start[rank] + i],
-total_sample_size, config_.max_bin_by_feature[start[rank] + i],
-config_.min_data_in_bin, filter_cnt, bin_type, config_.use_missing,
+bin_mappers[i]->FindBin(sample_values[start[rank] + i], num_per_col[start[rank] + i],
+total_sample_size, config_.max_bin_by_feature[start[rank] + i],
+config_.min_data_in_bin, filter_cnt, bin_type, config_.use_missing,
config_.zero_as_missing);
}
OMP_LOOP_EX_END();
@@ -908,12 +908,12 @@ void DatasetLoader::ConstructBinMappersFromTextData(int rank, int num_machines,
bin_mappers[i].reset(new BinMapper());
if (config_.max_bin_by_feature.empty()) {
bin_mappers[i]->FindBin(sample_values[i].data(), static_cast<int>(sample_values[i].size()),
-sample_data.size(), config_.max_bin, config_.min_data_in_bin,
+sample_data.size(), config_.max_bin, config_.min_data_in_bin,
filter_cnt, bin_type, config_.use_missing, config_.zero_as_missing);
} else {
bin_mappers[i]->FindBin(sample_values[i].data(), static_cast<int>(sample_values[i].size()),
-sample_data.size(), config_.max_bin_by_feature[i],
-config_.min_data_in_bin, filter_cnt, bin_type, config_.use_missing,
+sample_data.size(), config_.max_bin_by_feature[i],
+config_.min_data_in_bin, filter_cnt, bin_type, config_.use_missing,
config_.zero_as_missing);
}
OMP_LOOP_EX_END();
@@ -952,16 +952,16 @@ }
}
bin_mappers[i].reset(new BinMapper());
if (config_.max_bin_by_feature.empty()) {
-bin_mappers[i]->FindBin(sample_values[start[rank] + i].data(),
+bin_mappers[i]->FindBin(sample_values[start[rank] + i].data(),
static_cast<int>(sample_values[start[rank] + i].size()),
-sample_data.size(), config_.max_bin, config_.min_data_in_bin,
+sample_data.size(), config_.max_bin, config_.min_data_in_bin,
filter_cnt, bin_type, config_.use_missing, config_.zero_as_missing);
} else {
-bin_mappers[i]->FindBin(sample_values[start[rank] + i].data(),
+bin_mappers[i]->FindBin(sample_values[start[rank] + i].data(),
static_cast<int>(sample_values[start[rank] + i].size()),
-sample_data.size(), config_.max_bin_by_feature[i],
-config_.min_data_in_bin, filter_cnt, bin_type,
-config_.use_missing, config_.zero_as_missing);
+sample_data.size(), config_.max_bin_by_feature[i],
+config_.min_data_in_bin, filter_cnt, bin_type,
+config_.use_missing, config_.zero_as_missing);
}
OMP_LOOP_EX_END();
}
8 changes: 3 additions & 5 deletions src/io/tree.cpp
@@ -515,7 +515,7 @@ Tree::Tree(const char* str, size_t* used_len) {
} else {
Log::Fatal("Tree model string format error, should contain leaf_value field");
}
-
+
if (key_vals.count("shrinkage")) {
Common::Atof(key_vals["shrinkage"].c_str(), &shrinkage_);
} else {
@@ -568,15 +568,13 @@

if (key_vals.count("internal_weight")) {
internal_weight_ = Common::StringToArrayFast<double>(key_vals["internal_weight"], num_leaves_ - 1);
-}
-else {
+} else {
internal_weight_.resize(num_leaves_ - 1);
}

if (key_vals.count("leaf_weight")) {
leaf_weight_ = Common::StringToArrayFast<double>(key_vals["leaf_weight"], num_leaves_);
-}
-else {
+} else {
leaf_weight_.resize(num_leaves_);
}

9 changes: 6 additions & 3 deletions src/metric/multiclass_metric.hpp
@@ -20,7 +20,7 @@ namespace LightGBM {
template<typename PointWiseLossCalculator>
class MulticlassMetric: public Metric {
public:
-explicit MulticlassMetric(const Config& config) :config_(config){
+explicit MulticlassMetric(const Config& config) :config_(config) {
num_class_ = config.num_class;
}

@@ -149,8 +149,11 @@ class MultiErrorMetric: public MulticlassMetric<MultiErrorMetric> {
}

inline static const std::string Name(const Config& config) {
-if (config.multi_error_top_k == 1) return "multi_error";
-else return "multi_error@" + std::to_string(config.multi_error_top_k);
+if (config.multi_error_top_k == 1) {
+return "multi_error";
+} else {
+return "multi_error@" + std::to_string(config.multi_error_top_k);
+}
}
};

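
(With config.multi_error_top_k = 3, for example, the braced version above reports the metric as "multi_error@3"; the default of 1 keeps the plain "multi_error" name.)
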
20 changes: 10 additions & 10 deletions src/objective/regression_objective.hpp
@@ -239,23 +239,23 @@ class RegressionL1loss: public RegressionL2loss {
const double alpha = 0.5;
if (weights_ == nullptr) {
if (bagging_mapper == nullptr) {
-#define data_reader(i) (residual_getter(label_,index_mapper[i]))
+#define data_reader(i) (residual_getter(label_, index_mapper[i]))
PercentileFun(double, data_reader, num_data_in_leaf, alpha);
#undef data_reader
} else {
-#define data_reader(i) (residual_getter(label_,bagging_mapper[index_mapper[i]]))
+#define data_reader(i) (residual_getter(label_, bagging_mapper[index_mapper[i]]))
PercentileFun(double, data_reader, num_data_in_leaf, alpha);
#undef data_reader
}
} else {
if (bagging_mapper == nullptr) {
-#define data_reader(i) (residual_getter(label_,index_mapper[i]))
+#define data_reader(i) (residual_getter(label_, index_mapper[i]))
#define weight_reader(i) (weights_[index_mapper[i]])
WeightedPercentileFun(double, data_reader, weight_reader, num_data_in_leaf, alpha);
#undef data_reader
#undef weight_reader
} else {
-#define data_reader(i) (residual_getter(label_,bagging_mapper[index_mapper[i]]))
+#define data_reader(i) (residual_getter(label_, bagging_mapper[index_mapper[i]]))
#define weight_reader(i) (weights_[bagging_mapper[index_mapper[i]]])
WeightedPercentileFun(double, data_reader, weight_reader, num_data_in_leaf, alpha);
#undef data_reader
@@ -526,23 +526,23 @@ class RegressionQuantileloss : public RegressionL2loss {
data_size_t num_data_in_leaf) const override {
if (weights_ == nullptr) {
if (bagging_mapper == nullptr) {
-#define data_reader(i) (residual_getter(label_,index_mapper[i]))
+#define data_reader(i) (residual_getter(label_, index_mapper[i]))
PercentileFun(double, data_reader, num_data_in_leaf, alpha_);
#undef data_reader
} else {
-#define data_reader(i) (residual_getter(label_,bagging_mapper[index_mapper[i]]))
+#define data_reader(i) (residual_getter(label_, bagging_mapper[index_mapper[i]]))
PercentileFun(double, data_reader, num_data_in_leaf, alpha_);
#undef data_reader
}
} else {
if (bagging_mapper == nullptr) {
-#define data_reader(i) (residual_getter(label_,index_mapper[i]))
+#define data_reader(i) (residual_getter(label_, index_mapper[i]))
#define weight_reader(i) (weights_[index_mapper[i]])
WeightedPercentileFun(double, data_reader, weight_reader, num_data_in_leaf, alpha_);
#undef data_reader
#undef weight_reader
} else {
-#define data_reader(i) (residual_getter(label_,bagging_mapper[index_mapper[i]]))
+#define data_reader(i) (residual_getter(label_, bagging_mapper[index_mapper[i]]))
#define weight_reader(i) (weights_[bagging_mapper[index_mapper[i]]])
WeightedPercentileFun(double, data_reader, weight_reader, num_data_in_leaf, alpha_);
#undef data_reader
@@ -627,13 +627,13 @@ class RegressionMAPELOSS : public RegressionL1loss {
data_size_t num_data_in_leaf) const override {
const double alpha = 0.5;
if (bagging_mapper == nullptr) {
-#define data_reader(i) (residual_getter(label_,index_mapper[i]))
+#define data_reader(i) (residual_getter(label_, index_mapper[i]))
#define weight_reader(i) (label_weight_[index_mapper[i]])
WeightedPercentileFun(double, data_reader, weight_reader, num_data_in_leaf, alpha);
#undef data_reader
#undef weight_reader
} else {
-#define data_reader(i) (residual_getter(label_,bagging_mapper[index_mapper[i]]))
+#define data_reader(i) (residual_getter(label_, bagging_mapper[index_mapper[i]]))
#define weight_reader(i) (label_weight_[bagging_mapper[index_mapper[i]]])
WeightedPercentileFun(double, data_reader, weight_reader, num_data_in_leaf, alpha);
#undef data_reader
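
The data_reader/weight_reader macros above splice index-mapped accessors into the PercentileFun/WeightedPercentileFun helpers. A rough stand-alone analogue of the unweighted case (hypothetical function, deliberately simplified — the real helper also handles edge cases and interpolates between neighbouring values):

    #include <algorithm>
    #include <functional>
    #include <vector>

    // Read residuals through an accessor (the role data_reader plays above),
    // sort a copy, and take the alpha-percentile; alpha = 0.5 gives the
    // median used by the L1 objective.
    double PercentileSketch(const std::function<double(int)>& data_reader,
                            int num_data, double alpha) {
      std::vector<double> values(num_data);
      for (int i = 0; i < num_data; ++i) values[i] = data_reader(i);
      std::sort(values.begin(), values.end());
      int pos = static_cast<int>(alpha * (num_data - 1));
      return values[pos];
    }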
