Skip to content

Commit

Permalink
fix many cpp lint errors (#2426)
Browse files Browse the repository at this point in the history
* fix many cpp lint errors

* indent

* fix bug

* fix more

* fix gpu

* more fixes
  • Loading branch information
guolinke committed Sep 22, 2019
1 parent 4f89cc1 commit f1a1486
Show file tree
Hide file tree
Showing 39 changed files with 228 additions and 261 deletions.
24 changes: 12 additions & 12 deletions include/LightGBM/R_object_helper.h
Expand Up @@ -123,26 +123,26 @@ typedef struct VECTOR_SER {

typedef union { VECTOR_SER s; double align; } SEXPREC_ALIGN;

// Pointer to the data payload of an R vector object: the payload starts
// immediately after the aligned SEXP header block.
#define DATAPTR(x) ((reinterpret_cast<SEXPREC_ALIGN*>(x)) + 1)

// Typed views of the payload. The DATAPTR expansion is parenthesized so the
// macros stay robust no matter how they are embedded in an expression.
#define R_CHAR_PTR(x) (reinterpret_cast<char*>(DATAPTR(x)))

#define R_INT_PTR(x) (reinterpret_cast<int*>(DATAPTR(x)))

#define R_INT64_PTR(x) (reinterpret_cast<int64_t*>(DATAPTR(x)))

#define R_REAL_PTR(x) (reinterpret_cast<double*>(DATAPTR(x)))

// Dereferencing accessors: read the first element of the payload.
#define R_AS_INT(x) (*(reinterpret_cast<int*>(DATAPTR(x))))

#define R_AS_INT64(x) (*(reinterpret_cast<int64_t*>(DATAPTR(x))))

// An R NULL is encoded with sexp type 0 in the header info field.
#define R_IS_NULL(x) ((*reinterpret_cast<LGBM_SE>(x)).sxpinfo.type == 0)

// 64bit pointer
#if INTPTR_MAX == INT64_MAX

// On 64-bit platforms the stored external pointer occupies one int64_t slot.
#define R_ADDR(x) (reinterpret_cast<int64_t*>(DATAPTR(x)))

inline void R_SET_PTR(LGBM_SE x, void* ptr) {
if (ptr == nullptr) {
Expand All @@ -156,7 +156,7 @@ inline void* R_GET_PTR(LGBM_SE x) {
if (R_IS_NULL(x)) {
return nullptr;
} else {
auto ret = (void *)(R_ADDR(x)[0]);
auto ret = reinterpret_cast<void*>(R_ADDR(x)[0]);
if (ret == NULL) {
ret = nullptr;
}
Expand All @@ -166,7 +166,7 @@ inline void* R_GET_PTR(LGBM_SE x) {

#else

// On 32-bit platforms the stored external pointer occupies one int32_t slot.
#define R_ADDR(x) (reinterpret_cast<int32_t*>(DATAPTR(x)))

inline void R_SET_PTR(LGBM_SE x, void* ptr) {
if (ptr == nullptr) {
Expand All @@ -180,7 +180,7 @@ inline void* R_GET_PTR(LGBM_SE x) {
if (R_IS_NULL(x)) {
return nullptr;
} else {
auto ret = (void *)(R_ADDR(x)[0]);
auto ret = reinterpret_cast<void*>(R_ADDR(x)[0]);
if (ret == NULL) {
ret = nullptr;
}
Expand Down
2 changes: 1 addition & 1 deletion include/LightGBM/config.h
Expand Up @@ -75,7 +75,7 @@ struct Config {
const std::unordered_map<std::string, std::string>& params,
const std::string& name, bool* out);

static void KV2Map(std::unordered_map<std::string, std::string>& params, const char* kv);
static void KV2Map(std::unordered_map<std::string, std::string>* params, const char* kv);
static std::unordered_map<std::string, std::string> Str2Map(const char* parameters);

#pragma region Parameters
Expand Down
4 changes: 2 additions & 2 deletions include/LightGBM/dataset.h
Expand Up @@ -289,7 +289,7 @@ class Dataset {
LIGHTGBM_EXPORT Dataset(data_size_t num_data);

void Construct(
std::vector<std::unique_ptr<BinMapper>>& bin_mappers,
std::vector<std::unique_ptr<BinMapper>>* bin_mappers,
int** sample_non_zero_indices,
const int* num_per_col,
size_t total_sample_cnt,
Expand Down Expand Up @@ -407,7 +407,7 @@ class Dataset {
void ConstructHistograms(const std::vector<int8_t>& is_feature_used,
const data_size_t* data_indices, data_size_t num_data,
int leaf_idx,
std::vector<std::unique_ptr<OrderedBin>>& ordered_bins,
std::vector<std::unique_ptr<OrderedBin>>* ordered_bins,
const score_t* gradients, const score_t* hessians,
score_t* ordered_gradients, score_t* ordered_hessians,
bool is_constant_hessian,
Expand Down
2 changes: 1 addition & 1 deletion include/LightGBM/dataset_loader.h
Expand Up @@ -52,7 +52,7 @@ class DatasetLoader {
void ConstructBinMappersFromTextData(int rank, int num_machines, const std::vector<std::string>& sample_data, const Parser* parser, Dataset* dataset);

/*! \brief Extract local features from memory */
void ExtractFeaturesFromMemory(std::vector<std::string>& text_data, const Parser* parser, Dataset* dataset);
void ExtractFeaturesFromMemory(std::vector<std::string>* text_data, const Parser* parser, Dataset* dataset);

/*! \brief Extract local features from file */
void ExtractFeaturesFromFile(const char* filename, const Parser* parser, const std::vector<data_size_t>& used_data_indices, Dataset* dataset);
Expand Down
12 changes: 6 additions & 6 deletions include/LightGBM/feature_group.h
Expand Up @@ -31,15 +31,15 @@ class FeatureGroup {
* \param sparse_threshold Threshold for treating a feature as a sparse feature
*/
FeatureGroup(int num_feature,
std::vector<std::unique_ptr<BinMapper>>& bin_mappers,
std::vector<std::unique_ptr<BinMapper>>* bin_mappers,
data_size_t num_data, double sparse_threshold, bool is_enable_sparse) : num_feature_(num_feature) {
CHECK(static_cast<int>(bin_mappers.size()) == num_feature);
CHECK(static_cast<int>(bin_mappers->size()) == num_feature);
// use bin at zero to store default_bin
num_total_bin_ = 1;
bin_offsets_.emplace_back(num_total_bin_);
int cnt_non_zero = 0;
for (int i = 0; i < num_feature_; ++i) {
bin_mappers_.emplace_back(bin_mappers[i].release());
bin_mappers_.emplace_back(bin_mappers->at(i).release());
auto num_bin = bin_mappers_[i]->num_bin();
if (bin_mappers_[i]->GetDefaultBin() == 0) {
num_bin -= 1;
Expand All @@ -54,14 +54,14 @@ class FeatureGroup {
}

FeatureGroup(int num_feature,
std::vector<std::unique_ptr<BinMapper>>& bin_mappers,
std::vector<std::unique_ptr<BinMapper>>* bin_mappers,
data_size_t num_data, bool is_sparse) : num_feature_(num_feature) {
CHECK(static_cast<int>(bin_mappers.size()) == num_feature);
CHECK(static_cast<int>(bin_mappers->size()) == num_feature);
// use bin at zero to store default_bin
num_total_bin_ = 1;
bin_offsets_.emplace_back(num_total_bin_);
for (int i = 0; i < num_feature_; ++i) {
bin_mappers_.emplace_back(bin_mappers[i].release());
bin_mappers_.emplace_back(bin_mappers->at(i).release());
auto num_bin = bin_mappers_[i]->num_bin();
if (bin_mappers_[i]->GetDefaultBin() == 0) {
num_bin -= 1;
Expand Down
20 changes: 9 additions & 11 deletions include/LightGBM/network.h
Expand Up @@ -166,7 +166,7 @@ class Network {
const ReduceFunction& reducer);

template<class T>
static T GlobalSyncUpByMin(T& local) {
static T GlobalSyncUpByMin(T local) {
T global = local;
Allreduce(reinterpret_cast<char*>(&local),
sizeof(local), sizeof(local),
Expand All @@ -189,7 +189,7 @@ class Network {
return global;
}
template<class T>
static T GlobalSyncUpByMax(T& local) {
static T GlobalSyncUpByMax(T local) {
T global = local;
Allreduce(reinterpret_cast<char*>(&local),
sizeof(local), sizeof(local),
Expand All @@ -213,7 +213,7 @@ class Network {
}

template<class T>
static T GlobalSyncUpBySum(T& local) {
static T GlobalSyncUpBySum(T local) {
T global = (T)0;
Allreduce(reinterpret_cast<char*>(&local),
sizeof(local), sizeof(local),
Expand All @@ -235,15 +235,15 @@ class Network {
}

template<class T>
static T GlobalSyncUpByMean(T& local) {
static T GlobalSyncUpByMean(T local) {
return static_cast<T>(GlobalSyncUpBySum(local) / num_machines_);
}

template<class T>
static void GlobalSum(std::vector<T>& local) {
std::vector<T> global(local.size(), 0);
Allreduce(reinterpret_cast<char*>(local.data()),
static_cast<comm_size_t>(sizeof(T) * local.size()), sizeof(T),
static std::vector<T> GlobalSum(std::vector<T>* local) {
std::vector<T> global(local->size(), 0);
Allreduce(reinterpret_cast<char*>(local->data()),
static_cast<comm_size_t>(sizeof(T) * local->size()), sizeof(T),
reinterpret_cast<char*>(global.data()),
[](const char* src, char* dst, int type_size, comm_size_t len) {
comm_size_t used_size = 0;
Expand All @@ -258,9 +258,7 @@ class Network {
used_size += type_size;
}
});
for (size_t i = 0; i < local.size(); ++i) {
local[i] = global[i];
}
return global;
}

private:
Expand Down
2 changes: 1 addition & 1 deletion include/LightGBM/tree_learner.h
Expand Up @@ -53,7 +53,7 @@ class TreeLearner {
* \return A trained tree
*/
virtual Tree* Train(const score_t* gradients, const score_t* hessians, bool is_constant_hessian,
Json& forced_split_json) = 0;
const Json& forced_split_json) = 0;

/*!
* \brief use an existing tree to fit the new gradients and hessians.
Expand Down
2 changes: 1 addition & 1 deletion include/LightGBM/utils/array_args.h
Expand Up @@ -116,7 +116,7 @@ class ArrayArgs {
std::vector<VAL_T>& ref = *arr;
VAL_T v = ref[end - 1];
for (;;) {
while (ref[++i] > v);
while (ref[++i] > v) {}
while (v > ref[--j]) { if (j == start) { break; } }
if (i >= j) { break; }
std::swap(ref[i], ref[j]);
Expand Down
53 changes: 14 additions & 39 deletions include/LightGBM/utils/common.h
Expand Up @@ -162,31 +162,6 @@ inline static const char* Atoi(const char* p, T* out) {
return p;
}

template <typename T>
inline void SplitToIntLike(const char *c_str, char delimiter,
std::vector<T> &ret) {
CHECK(ret.empty());
std::string str(c_str);
size_t i = 0;
size_t pos = 0;
while (pos < str.length()) {
if (str[pos] == delimiter) {
if (i < pos) {
ret.push_back({});
Atoi(str.substr(i, pos - i).c_str(), &ret.back());
}
++pos;
i = pos;
} else {
++pos;
}
}
if (i < pos) {
ret.push_back({});
Atoi(str.substr(i).c_str(), &ret.back());
}
}

template<typename T>
inline static double Pow(T base, int power) {
if (power < 0) {
Expand Down Expand Up @@ -664,10 +639,10 @@ std::vector<const T*> ConstPtrInVectorWrapper(const std::vector<std::unique_ptr<
}

template<typename T1, typename T2>
inline static void SortForPair(std::vector<T1>& keys, std::vector<T2>& values, size_t start, bool is_reverse = false) {
inline static void SortForPair(std::vector<T1>* keys, std::vector<T2>* values, size_t start, bool is_reverse = false) {
std::vector<std::pair<T1, T2>> arr;
for (size_t i = start; i < keys.size(); ++i) {
arr.emplace_back(keys[i], values[i]);
for (size_t i = start; i < keys->size(); ++i) {
arr.emplace_back(keys->at(i), values->at(i));
}
if (!is_reverse) {
std::stable_sort(arr.begin(), arr.end(), [](const std::pair<T1, T2>& a, const std::pair<T1, T2>& b) {
Expand All @@ -679,16 +654,16 @@ inline static void SortForPair(std::vector<T1>& keys, std::vector<T2>& values, s
});
}
for (size_t i = start; i < arr.size(); ++i) {
keys[i] = arr[i].first;
values[i] = arr[i].second;
keys->at(i) = arr[i].first;
values->at(i) = arr[i].second;
}
}

/*!
* \brief Collect the raw data() pointer of every inner vector.
* \param data non-null pointer to a vector of vectors; the inner buffers
*        must outlive the returned pointers (no ownership is transferred)
* \return one T* per inner vector, in the same order
*/
template <typename T>
inline static std::vector<T*> Vector2Ptr(std::vector<std::vector<T>>* data) {
  std::vector<T*> ptr(data->size());
  for (size_t i = 0; i < data->size(); ++i) {
    ptr[i] = data->at(i).data();
  }
  return ptr;
}
Expand All @@ -715,7 +690,7 @@ inline static double AvoidInf(double x) {
}

inline static float AvoidInf(float x) {
if (std::isnan(x)){
if (std::isnan(x)) {
return 0.0f;
} else if (x >= 1e38) {
return 1e38f;
Expand Down Expand Up @@ -865,13 +840,13 @@ inline static std::vector<uint32_t> EmptyBitset(int n) {
}

/*!
* \brief Set bit `val` in a uint32-word bitset, growing the vector as needed.
*        Word i1 = val / 32 holds the bit, at in-word position i2 = val % 32.
* \param vec non-null pointer to the bitset words; may be resized (new words
*        are zero-filled)
* \param val zero-based bit index to set; expected non-negative
*/
template<typename T>
inline static void InsertBitset(std::vector<uint32_t>* vec, const T val) {
  int i1 = val / 32;
  int i2 = val % 32;
  if (static_cast<int>(vec->size()) < i1 + 1) {
    vec->resize(i1 + 1, 0);
  }
  vec->at(i1) |= (1 << i2);
}

template<typename T>
Expand Down
2 changes: 1 addition & 1 deletion include/LightGBM/utils/random.h
Expand Up @@ -29,7 +29,7 @@ class Random {
/*!
* \brief Constructor, with specific seed
*/
Random(int seed) {
explicit Random(int seed) {
x = seed;
}
/*!
Expand Down
8 changes: 4 additions & 4 deletions include/LightGBM/utils/text_reader.h
Expand Up @@ -160,7 +160,7 @@ class TextReader {
return ret;
}

INDEX_T SampleFromFile(Random& random, INDEX_T sample_cnt, std::vector<std::string>* out_sampled_data) {
INDEX_T SampleFromFile(Random* random, INDEX_T sample_cnt, std::vector<std::string>* out_sampled_data) {
INDEX_T cur_sample_cnt = 0;
return ReadAllAndProcess(
[&]
Expand All @@ -169,7 +169,7 @@ class TextReader {
out_sampled_data->emplace_back(buffer, size);
++cur_sample_cnt;
} else {
const size_t idx = static_cast<size_t>(random.NextInt(0, static_cast<int>(line_idx + 1)));
const size_t idx = static_cast<size_t>(random->NextInt(0, static_cast<int>(line_idx + 1)));
if (idx < static_cast<size_t>(sample_cnt)) {
out_sampled_data->operator[](idx) = std::string(buffer, size);
}
Expand All @@ -195,7 +195,7 @@ class TextReader {
}

INDEX_T SampleAndFilterFromFile(const std::function<bool(INDEX_T)>& filter_fun, std::vector<INDEX_T>* out_used_data_indices,
Random& random, INDEX_T sample_cnt, std::vector<std::string>* out_sampled_data) {
Random* random, INDEX_T sample_cnt, std::vector<std::string>* out_sampled_data) {
INDEX_T cur_sample_cnt = 0;
out_used_data_indices->clear();
INDEX_T total_cnt = ReadAllAndProcess(
Expand All @@ -208,7 +208,7 @@ class TextReader {
out_sampled_data->emplace_back(buffer, size);
++cur_sample_cnt;
} else {
const size_t idx = static_cast<size_t>(random.NextInt(0, static_cast<int>(out_used_data_indices->size())));
const size_t idx = static_cast<size_t>(random->NextInt(0, static_cast<int>(out_used_data_indices->size())));
if (idx < static_cast<size_t>(sample_cnt)) {
out_sampled_data->operator[](idx) = std::string(buffer, size);
}
Expand Down
5 changes: 2 additions & 3 deletions src/application/application.cpp
Expand Up @@ -48,7 +48,7 @@ Application::~Application() {
void Application::LoadParameters(int argc, char** argv) {
std::unordered_map<std::string, std::string> params;
for (int i = 1; i < argc; ++i) {
Config::KV2Map(params, argv[i]);
Config::KV2Map(&params, argv[i]);
}
// check for alias
ParameterAlias::KeyAliasTransform(&params);
Expand All @@ -66,7 +66,7 @@ void Application::LoadParameters(int argc, char** argv) {
if (line.size() == 0) {
continue;
}
Config::KV2Map(params, line.c_str());
Config::KV2Map(&params, line.c_str());
}
} else {
Log::Warning("Config file %s doesn't exist, will ignore",
Expand All @@ -85,7 +85,6 @@ void Application::LoadData() {
std::unique_ptr<Predictor> predictor;
// prediction is needed if using input initial model(continued train)
PredictFunction predict_fun = nullptr;

// need to continue training
if (boosting_->NumberOfTotalModel() > 0 && config_.task != TaskType::KRefitTree) {
predictor.reset(new Predictor(boosting_.get(), -1, true, false, false, false, -1, -1));
Expand Down

0 comments on commit f1a1486

Please sign in to comment.