Commit 3c999be

fixed cpplint error about spaces and newlines (#2068)
StrikerRUS committed Mar 26, 2019
1 parent 823fc03 commit 3c999be
Showing 20 changed files with 98 additions and 144 deletions.
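
Every hunk below applies the same few cpplint whitespace rules: a single space before `{`, a space after `if`/`for`, `}` and `else {` joined on one line, access specifiers indented by one space, and a `// namespace` comment on a namespace-closing brace. As a minimal, hypothetical sketch of code that satisfies these rules (illustrative names only, not taken from the LightGBM sources):

// style_demo.cpp -- hypothetical snippet illustrating the cpplint
// whitespace rules this commit enforces; not LightGBM code.
#include <cstdio>

namespace demo {

class Counter {
 public:  // access specifier indented by exactly one space
  void CountTo(int n) {  // one space between ')' and '{'
    if (n < 0) {  // space after 'if'; brace on the same line
      n = 0;
    } else {  // '}' and 'else {' joined on a single line
      for (int i = 0; i < n; ++i) {  // space after 'for'
        std::printf("%d, ", i);
      }
    }
  }
};

}  // namespace demo

int main() {
  demo::Counter c;
  c.CountTo(3);  // prints "0, 1, 2, "
  return 0;
}
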
2 changes: 1 addition & 1 deletion include/LightGBM/dataset_loader.h
@@ -73,6 +73,6 @@ class DatasetLoader {
   std::unordered_set<int> categorical_features_;
 };
 
-}
+}  // namespace LightGBM
 
 #endif  // LIGHTGBM_DATASET_LOADER_H_
4 changes: 2 additions & 2 deletions include/LightGBM/feature_group.h
@@ -212,14 +212,14 @@ class FeatureGroup {
   /*! \brief Disable copy */
   FeatureGroup& operator=(const FeatureGroup&) = delete;
   /*! \brief Deep copy */
-  FeatureGroup(const FeatureGroup& other){
+  FeatureGroup(const FeatureGroup& other) {
    num_feature_ = other.num_feature_;
    is_sparse_ = other.is_sparse_;
    num_total_bin_ = other.num_total_bin_;
    bin_offsets_ = other.bin_offsets_;
 
    bin_mappers_.reserve(other.bin_mappers_.size());
-    for(auto& bin_mapper : other.bin_mappers_){
+    for (auto& bin_mapper : other.bin_mappers_) {
      bin_mappers_.emplace_back(new BinMapper(*bin_mapper));
    }
 
2 changes: 1 addition & 1 deletion include/LightGBM/json11.hpp
@@ -77,7 +77,7 @@ enum JsonParse {
 class JsonValue;
 
 class Json final {
-public:
+ public:
     // Types
     enum Type {
         NUL, NUMBER, BOOL, STRING, ARRAY, OBJECT
2 changes: 1 addition & 1 deletion include/LightGBM/tree.h
@@ -331,7 +331,7 @@ class Tree {
     PathElement(int i, double z, double o, double w) : feature_index(i), zero_fraction(z), one_fraction(o), pweight(w) {}
   };
 
-  /*! \brief Polynomial time algorithm for SHAP values (https://arxiv.org/abs/1706.06060)*/
+  /*! \brief Polynomial time algorithm for SHAP values (arXiv:1706.06060)*/
   void TreeSHAP(const double *feature_values, double *phi,
                 int node, int unique_depth,
                 PathElement *parent_unique_path, double parent_zero_fraction,
3 changes: 1 addition & 2 deletions include/LightGBM/utils/common.h
@@ -341,8 +341,7 @@ inline static void Uint32ToStr(uint32_t value, char* buffer) {
 
   if (value < 10) {
     *--buffer = char(value) + '0';
-  }
-  else {
+  } else {
     const unsigned i = value << 1;
     *--buffer = kDigitsLut[i + 1];
     *--buffer = kDigitsLut[i];
21 changes: 7 additions & 14 deletions include/LightGBM/utils/text_reader.h
@@ -100,8 +100,7 @@ class TextReader {
           last_line_.append(buffer_process + last_i, i - last_i);
           process_fun(total_cnt, last_line_.c_str(), last_line_.size());
           last_line_ = "";
-        }
-        else {
+        } else {
           process_fun(total_cnt, buffer_process + last_i, i - last_i);
         }
         ++cnt;
@@ -110,8 +109,7 @@
        // skip end of line
        while ((buffer_process[i] == '\n' || buffer_process[i] == '\r') && i < read_cnt) { ++i; }
        last_i = i;
-      }
-      else {
+      } else {
        ++i;
      }
    }
@@ -167,8 +165,7 @@ class TextReader {
      if (cur_sample_cnt < sample_cnt) {
        out_sampled_data->emplace_back(buffer, size);
        ++cur_sample_cnt;
-      }
-      else {
+      } else {
        const size_t idx = static_cast<size_t>(random.NextInt(0, static_cast<int>(line_idx + 1)));
        if (idx < static_cast<size_t>(sample_cnt)) {
          out_sampled_data->operator[](idx) = std::string(buffer, size);
@@ -207,8 +204,7 @@ class TextReader {
      if (cur_sample_cnt < sample_cnt) {
        out_sampled_data->emplace_back(buffer, size);
        ++cur_sample_cnt;
-      }
-      else {
+      } else {
        const size_t idx = static_cast<size_t>(random.NextInt(0, static_cast<int>(out_used_data_indices->size())));
        if (idx < static_cast<size_t>(sample_cnt)) {
          out_sampled_data->operator[](idx) = std::string(buffer, size);
@@ -250,8 +246,7 @@ class TextReader {
            ++used_cnt;
          }
          last_line_ = "";
-        }
-        else {
+        } else {
          if (filter_fun(used_cnt, total_cnt)) {
            lines_.emplace_back(buffer_process + last_i, i - last_i);
            ++used_cnt;
@@ -263,8 +258,7 @@
        // skip end of line
        while ((buffer_process[i] == '\n' || buffer_process[i] == '\r') && i < read_cnt) { ++i; }
        last_i = i;
-      }
-      else {
+      } else {
        ++i;
      }
    }
@@ -299,8 +293,7 @@ class TextReader {
      [&used_data_indices](INDEX_T used_cnt, INDEX_T total_cnt) {
        if (static_cast<size_t>(used_cnt) < used_data_indices.size() && total_cnt == used_data_indices[used_cnt]) {
          return true;
-        }
-        else {
+        } else {
          return false;
        }
      });
12 changes: 4 additions & 8 deletions src/boosting/gbdt_model_text.cpp
@@ -355,20 +355,17 @@ bool GBDT::LoadModelFromString(const char* buffer, size_t len) {
      auto strs = Common::Split(cur_line.c_str(), '=');
      if (strs.size() == 1) {
        key_vals[strs[0]] = "";
-      }
-      else if (strs.size() == 2) {
+      } else if (strs.size() == 2) {
        key_vals[strs[0]] = strs[1];
-      }
-      else if (strs.size() > 2) {
+      } else if (strs.size() > 2) {
        if (strs[0] == "feature_names") {
          key_vals[strs[0]] = cur_line.substr(std::strlen("feature_names="));
        } else {
          // Use first 128 chars to avoid exceed the message buffer.
          Log::Fatal("Wrong line at model file: %s", cur_line.substr(0, std::min<size_t>(128, cur_line.size())).c_str());
        }
      }
-    }
-    else {
+    } else {
      break;
    }
  }
@@ -450,8 +447,7 @@ bool GBDT::LoadModelFromString(const char* buffer, size_t len) {
      size_t used_len = 0;
      models_.emplace_back(new Tree(p, &used_len));
      p += used_len;
-    }
-    else {
+    } else {
      break;
    }
  }
5 changes: 2 additions & 3 deletions src/c_api.cpp
@@ -713,7 +713,6 @@ int LGBM_DatasetCreateFromCSRFunc(void* get_row_funptr,
                                   const char* parameters,
                                   const DatasetHandle reference,
                                   DatasetHandle* out) {
-
   API_BEGIN();
 
   auto get_row_fun = *static_cast<std::function<void(int idx, std::vector<std::pair<int, double>>&)>*>(get_row_funptr);
@@ -758,7 +757,7 @@ int LGBM_DatasetCreateFromCSRFunc(void* get_row_funptr,
    ret->CreateValid(
      reinterpret_cast<const Dataset*>(reference));
  }
-
+
  OMP_INIT_EX();
  std::vector<std::pair<int, double>> threadBuffer;
 #pragma omp parallel for schedule(static) private(threadBuffer)
@@ -970,7 +969,7 @@ int LGBM_DatasetGetField(DatasetHandle handle,
   } else if (dataset->GetDoubleField(field_name, out_len, reinterpret_cast<const double**>(out_ptr))) {
     *out_type = C_API_DTYPE_FLOAT64;
     is_success = true;
-  } else if(dataset->GetInt8Field(field_name, out_len, reinterpret_cast<const int8_t**>(out_ptr))){
+  } else if (dataset->GetInt8Field(field_name, out_len, reinterpret_cast<const int8_t**>(out_ptr))) {
     *out_type = C_API_DTYPE_INT8;
     is_success = true;
   }
61 changes: 30 additions & 31 deletions src/io/dataset.cpp
@@ -578,11 +578,10 @@ bool Dataset::GetDoubleField(const char* field_name, data_size_t* out_len, const
   if (name == std::string("init_score")) {
     *out_ptr = metadata_.init_score();
     *out_len = static_cast<data_size_t>(metadata_.num_init_score());
-  } else if (name == std::string("feature_penalty")){
+  } else if (name == std::string("feature_penalty")) {
     *out_ptr = feature_penalty_.data();
     *out_len = static_cast<data_size_t>(feature_penalty_.size());
-  }
-  else {
+  } else {
     return false;
   }
   return true;
@@ -707,7 +706,7 @@ void Dataset::SaveBinaryFile(const char* bin_filename) {
   }
 }
 
-void Dataset::DumpTextFile(const char* text_filename){
+void Dataset::DumpTextFile(const char* text_filename) {
   FILE* file = NULL;
 #if _MSC_VER
   fopen_s(&file, text_filename, "wt");
@@ -719,36 +718,36 @@ void Dataset::DumpTextFile(const char* text_filename){
   fprintf(file, "num_groups: %d\n", num_groups_);
   fprintf(file, "num_data: %d\n", num_data_);
   fprintf(file, "feature_names: ");
-  for(auto n : feature_names_){
+  for (auto n : feature_names_) {
     fprintf(file, "%s, ", n.c_str());
   }
   fprintf(file, "\nmonotone_constraints: ");
-  for(auto i : monotone_types_){
+  for (auto i : monotone_types_) {
     fprintf(file, "%d, ", i);
   }
   fprintf(file, "\nfeature_penalty: ");
-  for(auto i : feature_penalty_){
+  for (auto i : feature_penalty_) {
     fprintf(file, "%lf, ", i);
   }
   fprintf(file, "\n");
-  for(auto n : feature_names_){
+  for (auto n : feature_names_) {
     fprintf(file, "%s, ", n.c_str());
   }
   std::vector<std::unique_ptr<BinIterator>> iterators;
   iterators.reserve(num_features_);
-  for(int j = 0; j < num_features_; ++j){
+  for (int j = 0; j < num_features_; ++j) {
    auto group_idx = feature2group_[j];
    auto sub_idx = feature2subfeature_[j];
    iterators.emplace_back(feature_groups_[group_idx]->SubFeatureIterator(sub_idx));
  }
-  for(data_size_t i = 0; i < num_data_; ++i){
+  for (data_size_t i = 0; i < num_data_; ++i) {
    fprintf(file, "\n");
-    for(int j = 0; j < num_total_features_; ++j){
+    for (int j = 0; j < num_total_features_; ++j) {
      auto inner_feature_idx = used_feature_map_[j];
-      if(inner_feature_idx < 0){
-        fprintf(file, "NA, ");
+      if (inner_feature_idx < 0) {
+        fprintf(file, "NA, ");
      } else {
-        fprintf(file, "%d, ", iterators[inner_feature_idx]->RawGet(i));
+        fprintf(file, "%d, ", iterators[inner_feature_idx]->RawGet(i));
      }
    }
  }
@@ -947,50 +946,50 @@ void Dataset::FixHistogram(int feature_idx, double sum_gradient, double sum_hess
 }
 
 template<typename T>
-void PushVector(std::vector<T>& dest, const std::vector<T>& src){
+void PushVector(std::vector<T>& dest, const std::vector<T>& src) {
   dest.reserve(dest.size() + src.size());
-  for(auto i : src){
+  for (auto i : src) {
    dest.push_back(i);
  }
 }
 
 template<typename T>
-void PushOffset(std::vector<T>& dest, const std::vector<T>& src, const T& offset){
+void PushOffset(std::vector<T>& dest, const std::vector<T>& src, const T& offset) {
   dest.reserve(dest.size() + src.size());
-  for(auto i : src){
+  for (auto i : src) {
    dest.push_back(i + offset);
  }
 }
 
 template<typename T>
-void PushClearIfEmpty(std::vector<T>& dest, const size_t dest_len, const std::vector<T>& src, const size_t src_len, const T& deflt){
-  if(!dest.empty() && !src.empty()){
+void PushClearIfEmpty(std::vector<T>& dest, const size_t dest_len, const std::vector<T>& src, const size_t src_len, const T& deflt) {
+  if (!dest.empty() && !src.empty()) {
    PushVector(dest, src);
-  } else if(!dest.empty() && src.empty()){
-    for(size_t i = 0; i < src_len; ++i){
+  } else if (!dest.empty() && src.empty()) {
+    for (size_t i = 0; i < src_len; ++i) {
      dest.push_back(deflt);
    }
-  } else if(dest.empty() && !src.empty()){
-    for(size_t i = 0; i < dest_len; ++i){
+  } else if (dest.empty() && !src.empty()) {
+    for (size_t i = 0; i < dest_len; ++i) {
      dest.push_back(deflt);
    }
    PushVector(dest, src);
  }
 }
 
-void Dataset::addFeaturesFrom(Dataset* other){
-  if(other->num_data_ != num_data_){
+void Dataset::addFeaturesFrom(Dataset* other) {
+  if (other->num_data_ != num_data_) {
    throw std::runtime_error("Cannot add features from other Dataset with a different number of rows");
  }
  PushVector(feature_names_, other->feature_names_);
  PushVector(feature2subfeature_, other->feature2subfeature_);
  PushVector(group_feature_cnt_, other->group_feature_cnt_);
  feature_groups_.reserve(other->feature_groups_.size());
-  for(auto& fg : other->feature_groups_){
+  for (auto& fg : other->feature_groups_) {
    feature_groups_.emplace_back(new FeatureGroup(*fg));
  }
-  for(auto feature_idx : other->used_feature_map_){
-    if(feature_idx >= 0){
+  for (auto feature_idx : other->used_feature_map_) {
+    if (feature_idx >= 0) {
      used_feature_map_.push_back(feature_idx + num_features_);
    } else {
      used_feature_map_.push_back(-1);  // Unused feature.
@@ -1000,14 +999,14 @@ void Dataset::addFeaturesFrom(Dataset* other){
   PushOffset(feature2group_, other->feature2group_, num_groups_);
   auto bin_offset = group_bin_boundaries_.back();
   // Skip the leading 0 when copying group_bin_boundaries.
-  for(auto i = other->group_bin_boundaries_.begin()+1; i < other->group_bin_boundaries_.end(); ++i){
+  for (auto i = other->group_bin_boundaries_.begin()+1; i < other->group_bin_boundaries_.end(); ++i) {
    group_bin_boundaries_.push_back(*i + bin_offset);
  }
  PushOffset(group_feature_start_, other->group_feature_start_, num_features_);
 
  PushClearIfEmpty(monotone_types_, num_total_features_, other->monotone_types_, other->num_total_features_, (int8_t)0);
  PushClearIfEmpty(feature_penalty_, num_total_features_, other->feature_penalty_, other->num_total_features_, 1.0);
 
  num_features_ += other->num_features_;
  num_total_features_ += other->num_total_features_;
  num_groups_ += other->num_groups_;
6 changes: 2 additions & 4 deletions src/io/dataset_loader.cpp
@@ -390,8 +390,7 @@ Dataset* DatasetLoader::LoadFromBinFile(const char* data_filename, const char* b
        dataset->monotone_types_[inner_fidx] = config_.monotone_constraints[i];
      }
    }
-  }
-  else {
+  } else {
    const int8_t* tmp_ptr_monotone_type = reinterpret_cast<const int8_t*>(mem_ptr);
    dataset->monotone_types_.clear();
    for (int i = 0; i < dataset->num_features_; ++i) {
@@ -413,8 +412,7 @@
        dataset->feature_penalty_[inner_fidx] = config_.feature_contri[i];
      }
    }
-  }
-  else {
+  } else {
    const double* tmp_ptr_feature_penalty = reinterpret_cast<const double*>(mem_ptr);
    dataset->feature_penalty_.clear();
    for (int i = 0; i < dataset->num_features_; ++i) {
2 changes: 1 addition & 1 deletion src/io/dense_bin.hpp
@@ -322,7 +322,7 @@ class DenseBin: public Bin {
 };
 
 template<typename VAL_T>
-DenseBin<VAL_T>* DenseBin<VAL_T>::Clone(){
+DenseBin<VAL_T>* DenseBin<VAL_T>::Clone() {
   return new DenseBin<VAL_T>(*this);
 }
 
4 changes: 2 additions & 2 deletions src/io/dense_nbits_bin.hpp
@@ -369,8 +369,8 @@ class Dense4bitsBin : public Bin {
 
 protected:
   Dense4bitsBin(const Dense4bitsBin& other)
-    : num_data_(other.num_data_), data_(other.data_), buf_(other.buf_){}
+    : num_data_(other.num_data_), data_(other.data_), buf_(other.buf_) {}
 
   data_size_t num_data_;
   std::vector<uint8_t> data_;
   std::vector<uint8_t> buf_;
