From 462612b42c6fe7bb1be1ccd8b72d77021b9f2d38 Mon Sep 17 00:00:00 2001 From: Nikita Titov Date: Wed, 6 Feb 2019 15:31:42 +0300 Subject: [PATCH] fixed modifiers indent (#1997) --- include/LightGBM/application.h | 4 +-- include/LightGBM/bin.h | 12 ++++----- include/LightGBM/boosting.h | 4 +-- include/LightGBM/config.h | 4 +-- include/LightGBM/dataset.h | 10 ++++---- include/LightGBM/dataset_loader.h | 4 +-- include/LightGBM/feature_group.h | 4 +-- include/LightGBM/json11.hpp | 4 +-- include/LightGBM/metric.h | 6 ++--- include/LightGBM/network.h | 8 +++--- include/LightGBM/objective_function.h | 2 +- include/LightGBM/tree.h | 4 +-- include/LightGBM/tree_learner.h | 2 +- include/LightGBM/utils/array_args.h | 2 +- include/LightGBM/utils/log.h | 4 +-- include/LightGBM/utils/openmp_wrapper.h | 5 ++-- include/LightGBM/utils/pipeline_reader.h | 2 +- include/LightGBM/utils/random.h | 4 +-- include/LightGBM/utils/text_reader.h | 4 +-- include/LightGBM/utils/threading.h | 2 +- src/application/predictor.hpp | 4 +-- src/boosting/dart.hpp | 4 +-- src/boosting/gbdt.h | 4 +-- src/boosting/goss.hpp | 4 +-- src/boosting/rf.hpp | 4 +-- src/boosting/score_updater.hpp | 4 +-- src/c_api.cpp | 9 ++++--- src/io/dense_bin.hpp | 9 ++++--- src/io/dense_nbits_bin.hpp | 9 ++++--- src/io/file_io.cpp | 4 +-- src/io/json11.cpp | 16 ++++++------ src/io/ordered_sparse_bin.hpp | 4 +-- src/io/parser.hpp | 12 ++++----- src/io/sparse_bin.hpp | 8 +++--- src/metric/binary_metric.hpp | 12 ++++----- src/metric/map_metric.hpp | 4 +-- src/metric/multiclass_metric.hpp | 8 +++--- src/metric/rank_metric.hpp | 4 +-- src/metric/regression_metric.hpp | 26 +++++++++---------- src/metric/xentropy_metric.hpp | 12 ++++----- src/network/linkers.h | 4 +-- src/network/socket_wrapper.hpp | 4 +-- src/objective/binary_objective.hpp | 4 +-- src/objective/multiclass_objective.hpp | 8 +++--- src/objective/rank_objective.hpp | 4 +-- src/objective/regression_objective.hpp | 32 ++++++++++++------------ src/objective/xentropy_objective.hpp | 6 ++--- src/treelearner/data_partition.hpp | 4 +-- src/treelearner/feature_histogram.hpp | 10 ++++---- src/treelearner/gpu_tree_learner.h | 8 +++--- src/treelearner/leaf_splits.hpp | 4 +-- src/treelearner/parallel_tree_learner.h | 19 +++++++------- src/treelearner/serial_tree_learner.h | 4 +-- src/treelearner/split_info.hpp | 4 +-- 54 files changed, 186 insertions(+), 181 deletions(-) diff --git a/include/LightGBM/application.h b/include/LightGBM/application.h index 01bf43b0c81..800426b4d5e 100644 --- a/include/LightGBM/application.h +++ b/include/LightGBM/application.h @@ -23,7 +23,7 @@ class Metric; * and save the score to disk. */ class Application { -public: + public: Application(int argc, char** argv); /*! \brief Destructor */ @@ -32,7 +32,7 @@ class Application { /*! \brief To call this funciton to run application*/ inline void Run(); -private: + private: /*! \brief Load parameters from command line and config file*/ void LoadParameters(int argc, char** argv); diff --git a/include/LightGBM/bin.h b/include/LightGBM/bin.h index 27f6d50e60a..069b9ec7e17 100644 --- a/include/LightGBM/bin.h +++ b/include/LightGBM/bin.h @@ -27,7 +27,7 @@ enum MissingType { /*! \brief Store data for one histogram bin */ struct HistogramBinEntry { -public: + public: /*! \brief Sum of gradients on this bin */ double sum_gradients = 0.0f; /*! \brief Sum of hessians on this bin */ @@ -59,7 +59,7 @@ struct HistogramBinEntry { /*! 
\brief This class used to convert feature values into bin, * and store some meta information for bin*/ class BinMapper { -public: + public: BinMapper(); BinMapper(const BinMapper& other); explicit BinMapper(const void* memory); @@ -184,7 +184,7 @@ class BinMapper { } } -private: + private: /*! \brief Number of bins */ int num_bin_; MissingType missing_type_; @@ -217,7 +217,7 @@ class BinMapper { * So we only using ordered bin for sparse situations. */ class OrderedBin { -public: + public: /*! \brief virtual destructor */ virtual ~OrderedBin() {} @@ -265,7 +265,7 @@ class OrderedBin { /*! \brief Iterator for one bin column */ class BinIterator { -public: + public: /*! * \brief Get bin data on specific row index * \param idx Index of this data @@ -284,7 +284,7 @@ class BinIterator { * but it doesn't need to re-order operation, So it will be faster than OrderedBin for dense feature */ class Bin { -public: + public: /*! \brief virtual destructor */ virtual ~Bin() {} /*! diff --git a/include/LightGBM/boosting.h b/include/LightGBM/boosting.h index 13e854aea3e..9e2115684cf 100644 --- a/include/LightGBM/boosting.h +++ b/include/LightGBM/boosting.h @@ -20,7 +20,7 @@ struct PredictionEarlyStopInstance; * \brief The interface for Boosting */ class LIGHTGBM_EXPORT Boosting { -public: + public: /*! \brief virtual destructor */ virtual ~Boosting() {} @@ -294,7 +294,7 @@ class LIGHTGBM_EXPORT Boosting { }; class GBDTBase : public Boosting { -public: + public: virtual double GetLeafValue(int tree_idx, int leaf_idx) const = 0; virtual void SetLeafValue(int tree_idx, int leaf_idx, double val) = 0; }; diff --git a/include/LightGBM/config.h b/include/LightGBM/config.h index a53718c7a9a..dea6d432a72 100644 --- a/include/LightGBM/config.h +++ b/include/LightGBM/config.h @@ -25,7 +25,7 @@ enum TaskType { const int kDefaultNumLeaves = 31; struct Config { -public: + public: std::string ToString() const; /*! * \brief Get string value by specific name of key @@ -772,7 +772,7 @@ struct Config { static std::unordered_map alias_table; static std::unordered_set parameter_set; -private: + private: void CheckParamConflict(); void GetMembersFromString(const std::unordered_map& params); std::string SaveMembersToString() const; diff --git a/include/LightGBM/dataset.h b/include/LightGBM/dataset.h index 3664fbb8220..520b9834d0a 100644 --- a/include/LightGBM/dataset.h +++ b/include/LightGBM/dataset.h @@ -34,7 +34,7 @@ class DatasetLoader; * 5. Initial score. optional. if exsitng, the model will boost from this score, otherwise will start from 0. */ class Metadata { -public: + public: /*! * \brief Null costructor */ @@ -206,7 +206,7 @@ class Metadata { /*! \brief Disable copy */ Metadata(const Metadata&) = delete; -private: + private: /*! \brief Load initial scores from file */ void LoadInitialScore(const char* initscore_file); /*! \brief Load wights from file */ @@ -247,7 +247,7 @@ class Metadata { /*! \brief Interface for Parser */ class Parser { -public: + public: /*! \brief virtual destructor */ virtual ~Parser() {} @@ -276,7 +276,7 @@ class Parser { * which are used to traning or validation */ class Dataset { -public: + public: friend DatasetLoader; LIGHTGBM_EXPORT Dataset(); @@ -581,7 +581,7 @@ class Dataset { /*! \brief Disable copy */ Dataset(const Dataset&) = delete; -private: + private: std::string data_filename_; /*! 
\brief Store used features */ std::vector> feature_groups_; diff --git a/include/LightGBM/dataset_loader.h b/include/LightGBM/dataset_loader.h index 8e61de6b433..36ee5e43b3a 100644 --- a/include/LightGBM/dataset_loader.h +++ b/include/LightGBM/dataset_loader.h @@ -6,7 +6,7 @@ namespace LightGBM { class DatasetLoader { -public: + public: LIGHTGBM_EXPORT DatasetLoader(const Config& io_config, const PredictFunction& predict_fun, int num_class, const char* filename); LIGHTGBM_EXPORT ~DatasetLoader(); @@ -28,7 +28,7 @@ class DatasetLoader { /*! \brief Disable copy */ DatasetLoader(const DatasetLoader&) = delete; -private: + private: Dataset* LoadFromBinFile(const char* data_filename, const char* bin_filename, int rank, int num_machines, int* num_global_data, std::vector* used_data_indices); void SetHeader(const char* filename); diff --git a/include/LightGBM/feature_group.h b/include/LightGBM/feature_group.h index 5a3500aa5eb..d85543500b0 100644 --- a/include/LightGBM/feature_group.h +++ b/include/LightGBM/feature_group.h @@ -16,7 +16,7 @@ class Dataset; class DatasetLoader; /*! \brief Using to store data and providing some operations on one feature group*/ class FeatureGroup { -public: + public: friend Dataset; friend DatasetLoader; /*! @@ -214,7 +214,7 @@ class FeatureGroup { /*! \brief Disable copy */ FeatureGroup(const FeatureGroup&) = delete; -private: + private: /*! \brief Number of features */ int num_feature_; /*! \brief Bin mapper for sub features */ diff --git a/include/LightGBM/json11.hpp b/include/LightGBM/json11.hpp index cfd6ce16c83..7f7bcf8d626 100644 --- a/include/LightGBM/json11.hpp +++ b/include/LightGBM/json11.hpp @@ -204,13 +204,13 @@ class Json final { typedef std::initializer_list> shape; bool has_shape(const shape & types, std::string & err) const; -private: + private: std::shared_ptr m_ptr; }; // Internal class hierarchy - JsonValue objects are not exposed to users of this API. class JsonValue { -protected: + protected: friend class Json; friend class JsonInt; friend class JsonDouble; diff --git a/include/LightGBM/metric.h b/include/LightGBM/metric.h index 88ac266c2d8..72302525f86 100644 --- a/include/LightGBM/metric.h +++ b/include/LightGBM/metric.h @@ -18,7 +18,7 @@ namespace LightGBM { * Metric is used to calculate metric result */ class Metric { -public: + public: /*! \brief virtual destructor */ virtual ~Metric() {} @@ -57,7 +57,7 @@ class Metric { * \brief Static class, used to calculate DCG score */ class DCGCalculator { -public: + public: static void DefaultEvalAt(std::vector* eval_at); static void DefaultLabelGain(std::vector* label_gain); /*! @@ -123,7 +123,7 @@ class DCGCalculator { */ inline static double GetDiscount(data_size_t k) { return discount_[k]; } -private: + private: /*! \brief store gains for different label */ static std::vector label_gain_; /*! \brief store discount score for different position */ diff --git a/include/LightGBM/network.h b/include/LightGBM/network.h index a03a8b908e6..8045a7b8780 100644 --- a/include/LightGBM/network.h +++ b/include/LightGBM/network.h @@ -17,7 +17,7 @@ class Linkers; /*! \brief The network structure for all_gather */ class BruckMap { -public: + public: /*! \brief The communication times for one all gather operation */ int k; /*! \brief in_ranks[i] means the incomming rank on i-th communication */ @@ -51,7 +51,7 @@ enum RecursiveHalvingNodeType { /*! \brief Network structure for recursive halving algorithm */ class RecursiveHalvingMap { -public: + public: /*! 
\brief Communication times for one recursize halving algorithm */ int k; /*! \brief Node type */ @@ -84,7 +84,7 @@ class RecursiveHalvingMap { /*! \brief A static class that contains some collective communication algorithm */ class Network { -public: + public: /*! * \brief Initialize * \param config Config of network setting @@ -256,7 +256,7 @@ class Network { } } -private: + private: static void AllgatherBruck(char* input, const comm_size_t* block_start, const comm_size_t* block_len, char* output, comm_size_t all_size); static void AllgatherRecursiveDoubling(char* input, const comm_size_t* block_start, const comm_size_t* block_len, char* output, comm_size_t all_size); diff --git a/include/LightGBM/objective_function.h b/include/LightGBM/objective_function.h index d7baa458e5a..a2cb6d96a83 100644 --- a/include/LightGBM/objective_function.h +++ b/include/LightGBM/objective_function.h @@ -11,7 +11,7 @@ namespace LightGBM { * \brief The interface of Objective Function. */ class ObjectiveFunction { -public: + public: /*! \brief virtual destructor */ virtual ~ObjectiveFunction() {} diff --git a/include/LightGBM/tree.h b/include/LightGBM/tree.h index 2be212e1c4f..de36cabaf90 100644 --- a/include/LightGBM/tree.h +++ b/include/LightGBM/tree.h @@ -18,7 +18,7 @@ namespace LightGBM { * \brief Tree model */ class Tree { -public: + public: /*! * \brief Constructor * \param max_leaves The number of max leaves @@ -203,7 +203,7 @@ class Tree { void RecomputeMaxDepth(); -private: + private: std::string NumericalDecisionIfElse(int node) const; std::string CategoricalDecisionIfElse(int node) const; diff --git a/include/LightGBM/tree_learner.h b/include/LightGBM/tree_learner.h index 960e9ba9664..d679851f958 100644 --- a/include/LightGBM/tree_learner.h +++ b/include/LightGBM/tree_learner.h @@ -21,7 +21,7 @@ class ObjectiveFunction; * \brief Interface for tree learner */ class TreeLearner { -public: + public: /*! \brief virtual destructor */ virtual ~TreeLearner() {} diff --git a/include/LightGBM/utils/array_args.h b/include/LightGBM/utils/array_args.h index a4ad659f0f1..ef8db97c056 100644 --- a/include/LightGBM/utils/array_args.h +++ b/include/LightGBM/utils/array_args.h @@ -12,7 +12,7 @@ namespace LightGBM { */ template class ArrayArgs { -public: + public: inline static size_t ArgMaxMT(const std::vector& array) { int num_threads = 1; #pragma omp parallel diff --git a/include/LightGBM/utils/log.h b/include/LightGBM/utils/log.h index 7208895dcbe..26706ac6e5f 100644 --- a/include/LightGBM/utils/log.h +++ b/include/LightGBM/utils/log.h @@ -41,7 +41,7 @@ enum class LogLevel: int { * \brief A static Log class */ class Log { -public: + public: /*! * \brief Resets the minimal log level. It is INFO by default. * \param level The new minimal log level. 
@@ -83,7 +83,7 @@ class Log { throw std::runtime_error(std::string(str_buf)); } -private: + private: static void Write(LogLevel level, const char* level_str, const char *format, va_list val) { if (level <= GetLevel()) { // omit the message with low level // write to STDOUT diff --git a/include/LightGBM/utils/openmp_wrapper.h b/include/LightGBM/utils/openmp_wrapper.h index d48435794ef..23b1e0f5b0c 100644 --- a/include/LightGBM/utils/openmp_wrapper.h +++ b/include/LightGBM/utils/openmp_wrapper.h @@ -11,7 +11,7 @@ #include "log.h" class ThreadExceptionHelper { -public: + public: ThreadExceptionHelper() { ex_ptr_ = nullptr; } @@ -31,7 +31,8 @@ class ThreadExceptionHelper { if (ex_ptr_ != nullptr) { return; } ex_ptr_ = std::current_exception(); } -private: + + private: std::exception_ptr ex_ptr_; std::mutex lock_; }; diff --git a/include/LightGBM/utils/pipeline_reader.h b/include/LightGBM/utils/pipeline_reader.h index 057760a6809..ad69771fbca 100644 --- a/include/LightGBM/utils/pipeline_reader.h +++ b/include/LightGBM/utils/pipeline_reader.h @@ -18,7 +18,7 @@ namespace LightGBM { * \brief A pipeline file reader, use 2 threads, one read block from file, the other process the block */ class PipelineReader { -public: + public: /*! * \brief Read data from a file, use pipeline methods * \param filename Filename of data diff --git a/include/LightGBM/utils/random.h b/include/LightGBM/utils/random.h index d7086ff8a7f..9d50727dfe4 100644 --- a/include/LightGBM/utils/random.h +++ b/include/LightGBM/utils/random.h @@ -13,7 +13,7 @@ namespace LightGBM { * \brief A wrapper for random generator */ class Random { -public: + public: /*! * \brief Constructor, with random seed */ @@ -94,7 +94,7 @@ class Random { return ret; } -private: + private: inline int RandInt16() { x = (214013 * x + 2531011); return static_cast((x >> 16) & 0x7FFF); diff --git a/include/LightGBM/utils/text_reader.h b/include/LightGBM/utils/text_reader.h index d170668c56e..3ffb3d826de 100644 --- a/include/LightGBM/utils/text_reader.h +++ b/include/LightGBM/utils/text_reader.h @@ -19,7 +19,7 @@ namespace LightGBM { */ template class TextReader { -public: + public: /*! * \brief Constructor * \param filename Filename of data @@ -306,7 +306,7 @@ class TextReader { }); } -private: + private: /*! \brief Filename of text data */ const char* filename_; /*! \brief Cache the read text data */ diff --git a/include/LightGBM/utils/threading.h b/include/LightGBM/utils/threading.h index 312ac85bc21..0f3b8e57973 100644 --- a/include/LightGBM/utils/threading.h +++ b/include/LightGBM/utils/threading.h @@ -9,7 +9,7 @@ namespace LightGBM { class Threading { -public: + public: template static inline void For(INDEX_T start, INDEX_T end, const std::function& inner_fun) { int num_threads = 1; diff --git a/src/application/predictor.hpp b/src/application/predictor.hpp index 201f6316362..518542ce308 100644 --- a/src/application/predictor.hpp +++ b/src/application/predictor.hpp @@ -23,7 +23,7 @@ namespace LightGBM { * \brief Used to predict data with input model */ class Predictor { -public: + public: /*! 
* \brief Constructor * \param boosting Input boosting model @@ -207,7 +207,7 @@ class Predictor { predict_data_reader.ReadAllAndProcessParallel(process_fun); } -private: + private: void CopyToPredictBuffer(double* pred_buf, const std::vector>& features) { int loop_size = static_cast(features.size()); for (int i = 0; i < loop_size; ++i) { diff --git a/src/boosting/dart.hpp b/src/boosting/dart.hpp index fb6ab56f4f2..500d02fa7e6 100644 --- a/src/boosting/dart.hpp +++ b/src/boosting/dart.hpp @@ -15,7 +15,7 @@ namespace LightGBM { * \brief DART algorithm implementation. including Training, prediction, bagging. */ class DART: public GBDT { -public: + public: /*! * \brief Constructor */ @@ -84,7 +84,7 @@ class DART: public GBDT { return false; } -private: + private: /*! * \brief drop trees based on drop_rate */ diff --git a/src/boosting/gbdt.h b/src/boosting/gbdt.h index 930da8d7e75..4c744c54e6b 100644 --- a/src/boosting/gbdt.h +++ b/src/boosting/gbdt.h @@ -24,7 +24,7 @@ namespace LightGBM { * \brief GBDT algorithm implementation. including Training, prediction, bagging. */ class GBDT : public GBDTBase { -public: + public: /*! * \brief Constructor */ @@ -354,7 +354,7 @@ class GBDT : public GBDTBase { */ virtual const char* SubModelName() const override { return "tree"; } -protected: + protected: /*! * \brief Print eval result and check early stopping */ diff --git a/src/boosting/goss.hpp b/src/boosting/goss.hpp index 4e34c404f5d..6a310afaf29 100644 --- a/src/boosting/goss.hpp +++ b/src/boosting/goss.hpp @@ -24,7 +24,7 @@ std::chrono::duration re_init_tree_time; #endif class GOSS: public GBDT { -public: + public: /*! * \brief Constructor */ @@ -208,7 +208,7 @@ class GOSS: public GBDT { } } -private: + private: std::vector tmp_indice_right_; }; diff --git a/src/boosting/rf.hpp b/src/boosting/rf.hpp index 09f8f0be863..2366d85dae2 100644 --- a/src/boosting/rf.hpp +++ b/src/boosting/rf.hpp @@ -16,7 +16,7 @@ namespace LightGBM { * \brief Rondom Forest implementation */ class RF : public GBDT { -public: + public: RF() : GBDT() { average_output_ = true; } @@ -199,7 +199,7 @@ class RF : public GBDT { return true; }; -private: + private: std::vector tmp_grad_; std::vector tmp_hess_; std::vector init_scores_; diff --git a/src/boosting/score_updater.hpp b/src/boosting/score_updater.hpp index 88d220f5896..4c0ad708c29 100644 --- a/src/boosting/score_updater.hpp +++ b/src/boosting/score_updater.hpp @@ -15,7 +15,7 @@ namespace LightGBM { * \brief Used to store and update score for data */ class ScoreUpdater { -public: + public: /*! * \brief Constructor, will pass a const pointer of dataset * \param data This class will bind with this data set @@ -109,7 +109,7 @@ class ScoreUpdater { /*! \brief Disable copy */ ScoreUpdater(const ScoreUpdater&) = delete; -private: + private: /*! \brief Number of total data */ data_size_t num_data_; /*! \brief Pointer of data set */ diff --git a/src/c_api.cpp b/src/c_api.cpp index e1ebdbe2cab..4ffdb328b71 100644 --- a/src/c_api.cpp +++ b/src/c_api.cpp @@ -44,7 +44,7 @@ catch(...) { return LGBM_APIHandleException("unknown exception"); } \ return 0; class Booster { -public: + public: explicit Booster(const char* filename) { boosting_.reset(Boosting::CreateBoosting("gbdt", filename)); } @@ -323,7 +323,7 @@ class Booster { const Boosting* GetBoosting() const { return boosting_.get(); } -private: + private: const Dataset* train_data_; std::unique_ptr boosting_; /*! 
\brief All configs */ @@ -356,7 +356,7 @@ RowFunctionFromCSR(const void* indptr, int indptr_type, const int32_t* indices, // Row iterator of on column for CSC matrix class CSC_RowIterator { -public: + public: CSC_RowIterator(const void* col_ptr, int col_ptr_type, const int32_t* indices, const void* data, int data_type, int64_t ncol_ptr, int64_t nelem, int col_idx); ~CSC_RowIterator() {} @@ -364,7 +364,8 @@ class CSC_RowIterator { double Get(int idx); // return next non-zero pair, if index < 0, means no more data std::pair NextNonZero(); -private: + + private: int nonzero_idx_ = 0; int cur_idx_ = -1; double cur_val_ = 0.0f; diff --git a/src/io/dense_bin.hpp b/src/io/dense_bin.hpp index f9920862550..da442b36df0 100644 --- a/src/io/dense_bin.hpp +++ b/src/io/dense_bin.hpp @@ -14,7 +14,7 @@ class DenseBin; template class DenseBinIterator: public BinIterator { -public: + public: explicit DenseBinIterator(const DenseBin* bin_data, uint32_t min_bin, uint32_t max_bin, uint32_t default_bin) : bin_data_(bin_data), min_bin_(static_cast(min_bin)), max_bin_(static_cast(max_bin)), @@ -28,7 +28,8 @@ class DenseBinIterator: public BinIterator { inline uint32_t RawGet(data_size_t idx) override; inline uint32_t Get(data_size_t idx) override; inline void Reset(data_size_t) override { } -private: + + private: const DenseBin* bin_data_; VAL_T min_bin_; VAL_T max_bin_; @@ -41,7 +42,7 @@ class DenseBinIterator: public BinIterator { */ template class DenseBin: public Bin { -public: + public: friend DenseBinIterator; DenseBin(data_size_t num_data) : num_data_(num_data), data_(num_data_, static_cast(0)) { @@ -310,7 +311,7 @@ class DenseBin: public Bin { return sizeof(VAL_T) * num_data_; } -protected: + protected: data_size_t num_data_; std::vector data_; }; diff --git a/src/io/dense_nbits_bin.hpp b/src/io/dense_nbits_bin.hpp index d0ea43a609f..dc48b085ba9 100644 --- a/src/io/dense_nbits_bin.hpp +++ b/src/io/dense_nbits_bin.hpp @@ -12,7 +12,7 @@ namespace LightGBM { class Dense4bitsBin; class Dense4bitsBinIterator : public BinIterator { -public: + public: explicit Dense4bitsBinIterator(const Dense4bitsBin* bin_data, uint32_t min_bin, uint32_t max_bin, uint32_t default_bin) : bin_data_(bin_data), min_bin_(static_cast(min_bin)), max_bin_(static_cast(max_bin)), @@ -26,7 +26,8 @@ class Dense4bitsBinIterator : public BinIterator { inline uint32_t RawGet(data_size_t idx) override; inline uint32_t Get(data_size_t idx) override; inline void Reset(data_size_t) override {} -private: + + private: const Dense4bitsBin* bin_data_; uint8_t min_bin_; uint8_t max_bin_; @@ -35,7 +36,7 @@ class Dense4bitsBinIterator : public BinIterator { }; class Dense4bitsBin : public Bin { -public: + public: friend Dense4bitsBinIterator; Dense4bitsBin(data_size_t num_data) : num_data_(num_data) { @@ -362,7 +363,7 @@ class Dense4bitsBin : public Bin { return sizeof(uint8_t) * data_.size(); } -protected: + protected: data_size_t num_data_; std::vector data_; std::vector buf_; diff --git a/src/io/file_io.cpp b/src/io/file_io.cpp index f50d02ce6ea..0f0504fb000 100644 --- a/src/io/file_io.cpp +++ b/src/io/file_io.cpp @@ -42,7 +42,7 @@ struct LocalFile : VirtualFileReader, VirtualFileWriter { return fwrite(buffer, bytes, 1, file_) == 1 ? 
bytes : 0; } -private: + private: FILE* file_ = NULL; const std::string filename_; const std::string mode_; @@ -86,7 +86,7 @@ struct HDFSFile : VirtualFileReader, VirtualFileWriter { return FileOperation(data, bytes, &hdfsWrite); } -private: + private: template using fileOp = tSize(*)(hdfsFS, hdfsFile, BufferType, tSize); diff --git a/src/io/json11.cpp b/src/io/json11.cpp index 30b90faf559..d3b2061e7a7 100644 --- a/src/io/json11.cpp +++ b/src/io/json11.cpp @@ -147,7 +147,7 @@ void Json::dump(string &out) const { template class Value : public JsonValue { -protected: + protected: // Constructors explicit Value(const T &value) : m_value(value) {} explicit Value(T &&value) : m_value(move(value)) {} @@ -174,7 +174,7 @@ class JsonDouble final : public Value { int int_value() const override { return static_cast(m_value); } bool equals(const JsonValue * other) const override { return m_value == other->number_value(); } bool less(const JsonValue * other) const override { return m_value < other->number_value(); } -public: + public: explicit JsonDouble(double value) : Value(value) {} }; @@ -183,19 +183,19 @@ class JsonInt final : public Value { int int_value() const override { return m_value; } bool equals(const JsonValue * other) const override { return m_value == other->number_value(); } bool less(const JsonValue * other) const override { return m_value < other->number_value(); } -public: + public: explicit JsonInt(int value) : Value(value) {} }; class JsonBoolean final : public Value { bool bool_value() const override { return m_value; } -public: + public: explicit JsonBoolean(bool value) : Value(value) {} }; class JsonString final : public Value { const string &string_value() const override { return m_value; } -public: + public: explicit JsonString(const string &value) : Value(value) {} explicit JsonString(string &&value) : Value(move(value)) {} }; @@ -203,7 +203,7 @@ class JsonString final : public Value { class JsonArray final : public Value { const Json::array &array_items() const override { return m_value; } const Json & operator[](size_t i) const override; -public: + public: explicit JsonArray(const Json::array &value) : Value(value) {} explicit JsonArray(Json::array &&value) : Value(move(value)) {} }; @@ -211,13 +211,13 @@ class JsonArray final : public Value { class JsonObject final : public Value { const Json::object &object_items() const override { return m_value; } const Json & operator[](const string &key) const override; -public: + public: explicit JsonObject(const Json::object &value) : Value(value) {} explicit JsonObject(Json::object &&value) : Value(move(value)) {} }; class JsonNull final : public Value { -public: + public: JsonNull() : Value({}) {} }; diff --git a/src/io/ordered_sparse_bin.hpp b/src/io/ordered_sparse_bin.hpp index 80c0eb21187..b069da8884b 100644 --- a/src/io/ordered_sparse_bin.hpp +++ b/src/io/ordered_sparse_bin.hpp @@ -24,7 +24,7 @@ namespace LightGBM { */ template class OrderedSparseBin: public OrderedBin { -public: + public: /*! \brief Pair to store one bin entry */ struct SparsePair { data_size_t ridx; // data(row) index @@ -192,7 +192,7 @@ class OrderedSparseBin: public OrderedBin { /*! \brief Disable copy */ OrderedSparseBin(const OrderedSparseBin&) = delete; -private: + private: const SparseBin* bin_data_; /*! 
\brief Store non-zero pair , group by leaf */ std::vector ordered_pair_; diff --git a/src/io/parser.hpp b/src/io/parser.hpp index cc9cbe0c337..fa2752daf58 100644 --- a/src/io/parser.hpp +++ b/src/io/parser.hpp @@ -13,7 +13,7 @@ namespace LightGBM { class CSVParser: public Parser { -public: + public: explicit CSVParser(int label_idx, int total_columns) :label_idx_(label_idx), total_columns_(total_columns) { } @@ -45,13 +45,13 @@ class CSVParser: public Parser { return total_columns_; } -private: + private: int label_idx_ = 0; int total_columns_ = -1; }; class TSVParser: public Parser { -public: + public: explicit TSVParser(int label_idx, int total_columns) :label_idx_(label_idx), total_columns_(total_columns) { } @@ -81,13 +81,13 @@ class TSVParser: public Parser { return total_columns_; } -private: + private: int label_idx_ = 0; int total_columns_ = -1; }; class LibSVMParser: public Parser { -public: + public: explicit LibSVMParser(int label_idx) :label_idx_(label_idx) { if (label_idx > 0) { @@ -121,7 +121,7 @@ class LibSVMParser: public Parser { return -1; } -private: + private: int label_idx_ = 0; }; diff --git a/src/io/sparse_bin.hpp b/src/io/sparse_bin.hpp index 409c371f15e..8fa80ae1074 100644 --- a/src/io/sparse_bin.hpp +++ b/src/io/sparse_bin.hpp @@ -20,7 +20,7 @@ const size_t kNumFastIndex = 64; template class SparseBinIterator: public BinIterator { -public: + public: SparseBinIterator(const SparseBin* bin_data, uint32_t min_bin, uint32_t max_bin, uint32_t default_bin) : bin_data_(bin_data), min_bin_(static_cast(min_bin)), @@ -52,7 +52,7 @@ class SparseBinIterator: public BinIterator { inline void Reset(data_size_t idx) override; -private: + private: const SparseBin* bin_data_; data_size_t cur_pos_; data_size_t i_delta_; @@ -67,7 +67,7 @@ class OrderedSparseBin; template class SparseBin: public Bin { -public: + public: friend class SparseBinIterator; friend class OrderedSparseBin; @@ -407,7 +407,7 @@ class SparseBin: public Bin { GetFastIndex(); } -protected: + protected: data_size_t num_data_; std::vector deltas_; std::vector vals_; diff --git a/src/metric/binary_metric.hpp b/src/metric/binary_metric.hpp index 2db98e3ead6..1758bd6fb45 100644 --- a/src/metric/binary_metric.hpp +++ b/src/metric/binary_metric.hpp @@ -18,7 +18,7 @@ namespace LightGBM { */ template class BinaryMetric: public Metric { -public: + public: explicit BinaryMetric(const Config&) { } @@ -92,7 +92,7 @@ class BinaryMetric: public Metric { return std::vector(1, loss); } -private: + private: /*! \brief Number of data */ data_size_t num_data_; /*! \brief Pointer of label */ @@ -109,7 +109,7 @@ class BinaryMetric: public Metric { * \brief Log loss metric for binary classification task. */ class BinaryLoglossMetric: public BinaryMetric { -public: + public: explicit BinaryLoglossMetric(const Config& config) :BinaryMetric(config) {} inline static double LossOnPoint(label_t label, double prob) { @@ -133,7 +133,7 @@ class BinaryLoglossMetric: public BinaryMetric { * \brief Error rate metric for binary classification task. */ class BinaryErrorMetric: public BinaryMetric { -public: + public: explicit BinaryErrorMetric(const Config& config) :BinaryMetric(config) {} inline static double LossOnPoint(label_t label, double prob) { @@ -153,7 +153,7 @@ class BinaryErrorMetric: public BinaryMetric { * \brief Auc Metric for binary classification task. 
*/ class AUCMetric: public Metric { -public: + public: explicit AUCMetric(const Config&) { } @@ -246,7 +246,7 @@ class AUCMetric: public Metric { return std::vector(1, auc); } -private: + private: /*! \brief Number of data */ data_size_t num_data_; /*! \brief Pointer of label */ diff --git a/src/metric/map_metric.hpp b/src/metric/map_metric.hpp index 2e346c616a3..4143c56708d 100644 --- a/src/metric/map_metric.hpp +++ b/src/metric/map_metric.hpp @@ -13,7 +13,7 @@ namespace LightGBM { class MapMetric:public Metric { -public: + public: explicit MapMetric(const Config& config) { // get eval position eval_at_ = config.eval_at; @@ -142,7 +142,7 @@ class MapMetric:public Metric { return result; } -private: + private: /*! \brief Number of data */ data_size_t num_data_; /*! \brief Pointer of label */ diff --git a/src/metric/multiclass_metric.hpp b/src/metric/multiclass_metric.hpp index be2375c69e7..3c661feb4ae 100644 --- a/src/metric/multiclass_metric.hpp +++ b/src/metric/multiclass_metric.hpp @@ -14,7 +14,7 @@ namespace LightGBM { */ template class MulticlassMetric: public Metric { -public: + public: explicit MulticlassMetric(const Config& config) { num_class_ = config.num_class; } @@ -112,7 +112,7 @@ class MulticlassMetric: public Metric { return std::vector(1, loss); } -private: + private: /*! \brief Number of data */ data_size_t num_data_; /*! \brief Pointer of label */ @@ -128,7 +128,7 @@ class MulticlassMetric: public Metric { /*! \brief L2 loss for multiclass task */ class MultiErrorMetric: public MulticlassMetric { -public: + public: explicit MultiErrorMetric(const Config& config) :MulticlassMetric(config) {} inline static double LossOnPoint(label_t label, std::vector& score) { @@ -148,7 +148,7 @@ class MultiErrorMetric: public MulticlassMetric { /*! \brief Logloss for multiclass task */ class MultiSoftmaxLoglossMetric: public MulticlassMetric { -public: + public: explicit MultiSoftmaxLoglossMetric(const Config& config) :MulticlassMetric(config) {} inline static double LossOnPoint(label_t label, std::vector& score) { diff --git a/src/metric/rank_metric.hpp b/src/metric/rank_metric.hpp index f46aa855f4a..b807131c7e4 100644 --- a/src/metric/rank_metric.hpp +++ b/src/metric/rank_metric.hpp @@ -13,7 +13,7 @@ namespace LightGBM { class NDCGMetric:public Metric { -public: + public: explicit NDCGMetric(const Config& config) { // get eval position eval_at_ = config.eval_at; @@ -143,7 +143,7 @@ class NDCGMetric:public Metric { return result; } -private: + private: /*! \brief Number of data */ data_size_t num_data_; /*! \brief Pointer of label */ diff --git a/src/metric/regression_metric.hpp b/src/metric/regression_metric.hpp index 1616a0b3437..27025d434b8 100644 --- a/src/metric/regression_metric.hpp +++ b/src/metric/regression_metric.hpp @@ -14,7 +14,7 @@ namespace LightGBM { */ template class RegressionMetric: public Metric { -public: + public: explicit RegressionMetric(const Config& config) :config_(config) { } @@ -95,7 +95,7 @@ class RegressionMetric: public Metric { inline static void CheckLabel(label_t) { } -private: + private: /*! \brief Number of data */ data_size_t num_data_; /*! \brief Pointer of label */ @@ -111,7 +111,7 @@ class RegressionMetric: public Metric { /*! \brief RMSE loss for regression task */ class RMSEMetric: public RegressionMetric { -public: + public: explicit RMSEMetric(const Config& config) :RegressionMetric(config) {} inline static double LossOnPoint(label_t label, double score, const Config&) { @@ -130,7 +130,7 @@ class RMSEMetric: public RegressionMetric { /*! 
\brief L2 loss for regression task */ class L2Metric: public RegressionMetric { -public: + public: explicit L2Metric(const Config& config) :RegressionMetric(config) {} inline static double LossOnPoint(label_t label, double score, const Config&) { @@ -144,7 +144,7 @@ class L2Metric: public RegressionMetric { /*! \brief L2 loss for regression task */ class QuantileMetric : public RegressionMetric { -public: + public: explicit QuantileMetric(const Config& config) :RegressionMetric(config) { } @@ -165,7 +165,7 @@ class QuantileMetric : public RegressionMetric { /*! \brief L1 loss for regression task */ class L1Metric: public RegressionMetric { -public: + public: explicit L1Metric(const Config& config) :RegressionMetric(config) {} inline static double LossOnPoint(label_t label, double score, const Config&) { @@ -178,7 +178,7 @@ class L1Metric: public RegressionMetric { /*! \brief Huber loss for regression task */ class HuberLossMetric: public RegressionMetric { -public: + public: explicit HuberLossMetric(const Config& config) :RegressionMetric(config) { } @@ -199,7 +199,7 @@ class HuberLossMetric: public RegressionMetric { /*! \brief Fair loss for regression task */ // http://research.microsoft.com/en-us/um/people/zhang/INRIA/Publis/Tutorial-Estim/node24.html class FairLossMetric: public RegressionMetric { -public: + public: explicit FairLossMetric(const Config& config) :RegressionMetric(config) { } @@ -216,7 +216,7 @@ class FairLossMetric: public RegressionMetric { /*! \brief Poisson regression loss for regression task */ class PoissonMetric: public RegressionMetric { -public: + public: explicit PoissonMetric(const Config& config) :RegressionMetric(config) { } @@ -235,7 +235,7 @@ class PoissonMetric: public RegressionMetric { /*! \brief Mape regression loss for regression task */ class MAPEMetric : public RegressionMetric { -public: + public: explicit MAPEMetric(const Config& config) :RegressionMetric(config) { } @@ -248,7 +248,7 @@ class MAPEMetric : public RegressionMetric { }; class GammaMetric : public RegressionMetric { -public: + public: explicit GammaMetric(const Config& config) :RegressionMetric(config) { } @@ -271,7 +271,7 @@ class GammaMetric : public RegressionMetric { class GammaDevianceMetric : public RegressionMetric { -public: + public: explicit GammaDevianceMetric(const Config& config) :RegressionMetric(config) { } @@ -292,7 +292,7 @@ class GammaDevianceMetric : public RegressionMetric { }; class TweedieMetric : public RegressionMetric { -public: + public: explicit TweedieMetric(const Config& config) :RegressionMetric(config) { } diff --git a/src/metric/xentropy_metric.hpp b/src/metric/xentropy_metric.hpp index eee08b21b0b..c06100972b5 100644 --- a/src/metric/xentropy_metric.hpp +++ b/src/metric/xentropy_metric.hpp @@ -65,7 +65,7 @@ namespace LightGBM { // CrossEntropyMetric : "xentropy" : (optional) weights are used linearly // class CrossEntropyMetric : public Metric { -public: + public: explicit CrossEntropyMetric(const Config&) {} virtual ~CrossEntropyMetric() {} @@ -142,7 +142,7 @@ class CrossEntropyMetric : public Metric { return -1.0f; // negative means smaller loss is better, positive means larger loss is better } -private: + private: /*! \brief Number of data points */ data_size_t num_data_; /*! 
\brief Pointer to label */ @@ -160,7 +160,7 @@ class CrossEntropyMetric : public Metric { // ATTENTION: Supposed to be used when the objective also is "xentlambda" // class CrossEntropyLambdaMetric : public Metric { -public: + public: explicit CrossEntropyLambdaMetric(const Config&) {} virtual ~CrossEntropyLambdaMetric() {} @@ -228,7 +228,7 @@ class CrossEntropyLambdaMetric : public Metric { return -1.0f; } -private: + private: /*! \brief Number of data points */ data_size_t num_data_; /*! \brief Pointer to label */ @@ -243,7 +243,7 @@ class CrossEntropyLambdaMetric : public Metric { // KullbackLeiblerDivergence : "kldiv" : (optional) weights are used linearly // class KullbackLeiblerDivergence : public Metric { -public: + public: explicit KullbackLeiblerDivergence(const Config&) {} virtual ~KullbackLeiblerDivergence() {} @@ -336,7 +336,7 @@ class KullbackLeiblerDivergence : public Metric { return -1.0f; } -private: + private: /*! \brief Number of data points */ data_size_t num_data_; /*! \brief Pointer to label */ diff --git a/src/network/linkers.h b/src/network/linkers.h index de2b13e30f6..c01db9300cc 100644 --- a/src/network/linkers.h +++ b/src/network/linkers.h @@ -32,7 +32,7 @@ namespace LightGBM { * This class will wrap all linkers to other machines if needs */ class Linkers { -public: + public: Linkers() { is_init_ = false; } @@ -136,7 +136,7 @@ class Linkers { #endif // USE_SOCKET -private: + private: /*! \brief Rank of local machine */ int rank_; /*! \brief Total number machines */ diff --git a/src/network/socket_wrapper.hpp b/src/network/socket_wrapper.hpp index f8635b1c2b4..fc972ee6eb3 100644 --- a/src/network/socket_wrapper.hpp +++ b/src/network/socket_wrapper.hpp @@ -86,7 +86,7 @@ const bool kNoDelay = true; } class TcpSocket { -public: + public: TcpSocket() { sockfd_ = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); if (sockfd_ == INVALID_SOCKET) { @@ -291,7 +291,7 @@ class TcpSocket { } } -private: + private: SOCKET sockfd_; }; diff --git a/src/objective/binary_objective.hpp b/src/objective/binary_objective.hpp index 8a2bb7a71a4..aa209e0d266 100644 --- a/src/objective/binary_objective.hpp +++ b/src/objective/binary_objective.hpp @@ -11,7 +11,7 @@ namespace LightGBM { * \brief Objective function for binary classification */ class BinaryLogloss: public ObjectiveFunction { -public: + public: explicit BinaryLogloss(const Config& config, std::function is_pos = nullptr) { sigmoid_ = static_cast(config.sigmoid); if (sigmoid_ <= 0.0) { @@ -172,7 +172,7 @@ class BinaryLogloss: public ObjectiveFunction { bool NeedAccuratePrediction() const override { return false; } -private: + private: /*! \brief Number of data */ data_size_t num_data_; /*! \brief Pointer of label */ diff --git a/src/objective/multiclass_objective.hpp b/src/objective/multiclass_objective.hpp index 7613c963e36..7c34e679878 100644 --- a/src/objective/multiclass_objective.hpp +++ b/src/objective/multiclass_objective.hpp @@ -14,7 +14,7 @@ namespace LightGBM { * \brief Objective function for multiclass classification, use softmax as objective functions */ class MulticlassSoftmax: public ObjectiveFunction { -public: + public: explicit MulticlassSoftmax(const Config& config) { num_class_ = config.num_class; } @@ -146,7 +146,7 @@ class MulticlassSoftmax: public ObjectiveFunction { } } -private: + private: /*! \brief Number of data */ data_size_t num_data_; /*! 
\brief Number of classes */ @@ -164,7 +164,7 @@ class MulticlassSoftmax: public ObjectiveFunction { * \brief Objective function for multiclass classification, use one-vs-all binary objective function */ class MulticlassOVA: public ObjectiveFunction { -public: + public: explicit MulticlassOVA(const Config& config) { num_class_ = config.num_class; for (int i = 0; i < num_class_; ++i) { @@ -246,7 +246,7 @@ class MulticlassOVA: public ObjectiveFunction { return binary_loss_[class_id]->ClassNeedTrain(0); } -private: + private: /*! \brief Number of data */ data_size_t num_data_; /*! \brief Number of classes */ diff --git a/src/objective/rank_objective.hpp b/src/objective/rank_objective.hpp index 3a618a0a0d7..b926ac2deee 100644 --- a/src/objective/rank_objective.hpp +++ b/src/objective/rank_objective.hpp @@ -17,7 +17,7 @@ namespace LightGBM { * \brief Objective function for Lambdrank with NDCG */ class LambdarankNDCG: public ObjectiveFunction { -public: + public: explicit LambdarankNDCG(const Config& config) { sigmoid_ = static_cast(config.sigmoid); label_gain_ = config.label_gain; @@ -205,7 +205,7 @@ class LambdarankNDCG: public ObjectiveFunction { bool NeedAccuratePrediction() const override { return false; } -private: + private: /*! \brief Gains for labels */ std::vector label_gain_; /*! \brief Cache inverse max DCG, speed up calculation */ diff --git a/src/objective/regression_objective.hpp b/src/objective/regression_objective.hpp index d43c170b2e9..a35b2461667 100644 --- a/src/objective/regression_objective.hpp +++ b/src/objective/regression_objective.hpp @@ -69,7 +69,7 @@ namespace LightGBM { * \brief Objective function for regression */ class RegressionL2loss: public ObjectiveFunction { -public: + public: explicit RegressionL2loss(const Config& config) { sqrt_ = config.reg_sqrt; } @@ -165,7 +165,7 @@ class RegressionL2loss: public ObjectiveFunction { return suml / sumw; } -protected: + protected: bool sqrt_; /*! \brief Number of data */ data_size_t num_data_; @@ -180,7 +180,7 @@ class RegressionL2loss: public ObjectiveFunction { * \brief L1 regression loss */ class RegressionL1loss: public RegressionL2loss { -public: + public: explicit RegressionL1loss(const Config& config): RegressionL2loss(config) { } @@ -298,7 +298,7 @@ class RegressionL1loss: public RegressionL2loss { * \brief Huber regression loss */ class RegressionHuberLoss: public RegressionL2loss { -public: + public: explicit RegressionHuberLoss(const Config& config): RegressionL2loss(config) { alpha_ = static_cast(config.alpha); if (sqrt_) { @@ -352,7 +352,7 @@ class RegressionHuberLoss: public RegressionL2loss { return false; } -private: + private: /*! \brief delta for Huber loss */ double alpha_; }; @@ -360,7 +360,7 @@ class RegressionHuberLoss: public RegressionL2loss { // http://research.microsoft.com/en-us/um/people/zhang/INRIA/Publis/Tutorial-Estim/node24.html class RegressionFairLoss: public RegressionL2loss { -public: + public: explicit RegressionFairLoss(const Config& config): RegressionL2loss(config) { c_ = static_cast(config.fair_c); } @@ -397,7 +397,7 @@ class RegressionFairLoss: public RegressionL2loss { return false; } -private: + private: /*! 
\brief c for Fair loss */ double c_; }; @@ -407,7 +407,7 @@ class RegressionFairLoss: public RegressionL2loss { * \brief Objective function for Poisson regression */ class RegressionPoissonLoss: public RegressionL2loss { -public: + public: explicit RegressionPoissonLoss(const Config& config): RegressionL2loss(config) { max_delta_step_ = static_cast(config.poisson_max_delta_step); if (sqrt_) { @@ -481,13 +481,13 @@ class RegressionPoissonLoss: public RegressionL2loss { return false; } -private: + private: /*! \brief used to safeguard optimization */ double max_delta_step_; }; class RegressionQuantileloss : public RegressionL2loss { -public: + public: explicit RegressionQuantileloss(const Config& config): RegressionL2loss(config) { alpha_ = static_cast(config.alpha); CHECK(alpha_ > 0 && alpha_ < 1); @@ -607,7 +607,7 @@ class RegressionQuantileloss : public RegressionL2loss { } } -private: + private: score_t alpha_; }; @@ -616,7 +616,7 @@ class RegressionQuantileloss : public RegressionL2loss { * \brief Mape Regression Loss */ class RegressionMAPELOSS : public RegressionL1loss { -public: + public: explicit RegressionMAPELOSS(const Config& config) : RegressionL1loss(config) { } @@ -725,7 +725,7 @@ class RegressionMAPELOSS : public RegressionL1loss { return true; } -private: + private: std::vector label_weight_; }; @@ -735,7 +735,7 @@ class RegressionMAPELOSS : public RegressionL1loss { * \brief Objective function for Gamma regression */ class RegressionGammaLoss : public RegressionPoissonLoss { -public: + public: explicit RegressionGammaLoss(const Config& config) : RegressionPoissonLoss(config) { } @@ -770,7 +770,7 @@ class RegressionGammaLoss : public RegressionPoissonLoss { * \brief Objective function for Tweedie regression */ class RegressionTweedieLoss: public RegressionPoissonLoss { -public: + public: explicit RegressionTweedieLoss(const Config& config) : RegressionPoissonLoss(config) { rho_ = config.tweedie_variance_power; } @@ -803,7 +803,7 @@ class RegressionTweedieLoss: public RegressionPoissonLoss { return "tweedie"; } -private: + private: double rho_; }; diff --git a/src/objective/xentropy_objective.hpp b/src/objective/xentropy_objective.hpp index d56b95a20a1..a116ad21aa7 100644 --- a/src/objective/xentropy_objective.hpp +++ b/src/objective/xentropy_objective.hpp @@ -36,7 +36,7 @@ namespace LightGBM { * \brief Objective function for cross-entropy (with optional linear weights) */ class CrossEntropy: public ObjectiveFunction { -public: + public: explicit CrossEntropy(const Config&) { } @@ -127,7 +127,7 @@ class CrossEntropy: public ObjectiveFunction { return initscore; } -private: + private: /*! \brief Number of data points */ data_size_t num_data_; /*! \brief Pointer for label */ @@ -140,7 +140,7 @@ class CrossEntropy: public ObjectiveFunction { * \brief Objective function for alternative parameterization of cross-entropy (see top of file for explanation) */ class CrossEntropyLambda: public ObjectiveFunction { -public: + public: explicit CrossEntropyLambda(const Config&) { min_weight_ = max_weight_ = 0.0f; } diff --git a/src/treelearner/data_partition.hpp b/src/treelearner/data_partition.hpp index 8b1a89c0eea..d7956247a53 100644 --- a/src/treelearner/data_partition.hpp +++ b/src/treelearner/data_partition.hpp @@ -15,7 +15,7 @@ namespace LightGBM { * \brief DataPartition is used to store the the partition of data on tree. 
*/ class DataPartition { -public: + public: DataPartition(data_size_t num_data, int num_leaves) :num_data_(num_data), num_leaves_(num_leaves) { leaf_begin_.resize(num_leaves_); @@ -188,7 +188,7 @@ class DataPartition { /*! \brief Get number of leaves */ int num_leaves() const { return num_leaves_; } -private: + private: /*! \brief Number of all data */ data_size_t num_data_; /*! \brief Number of all leaves */ diff --git a/src/treelearner/feature_histogram.hpp b/src/treelearner/feature_histogram.hpp index 5157e447c80..29fb349c623 100644 --- a/src/treelearner/feature_histogram.hpp +++ b/src/treelearner/feature_histogram.hpp @@ -12,7 +12,7 @@ namespace LightGBM { class FeatureMetainfo { -public: + public: int num_bin; MissingType missing_type; int8_t bias = 0; @@ -27,7 +27,7 @@ class FeatureMetainfo { * \brief FeatureHistogram is used to construct and store a histogram for a feature. */ class FeatureHistogram { -public: + public: FeatureHistogram() { data_ = nullptr; } @@ -449,7 +449,7 @@ class FeatureHistogram { } } -private: + private: static double GetSplitGains(double sum_left_gradients, double sum_left_hessians, double sum_right_gradients, double sum_right_hessians, double l1, double l2, double max_delta_step, @@ -644,7 +644,7 @@ class FeatureHistogram { std::function find_best_threshold_fun_; }; class HistogramPool { -public: + public: /*! * \brief Constructor */ @@ -804,7 +804,7 @@ class HistogramPool { inverse_mapper_[slot] = dst_idx; } -private: + private: std::vector> pool_; std::vector> data_; std::vector feature_metas_; diff --git a/src/treelearner/gpu_tree_learner.h b/src/treelearner/gpu_tree_learner.h index 7b68013caa6..26761f5e9fa 100644 --- a/src/treelearner/gpu_tree_learner.h +++ b/src/treelearner/gpu_tree_learner.h @@ -36,7 +36,7 @@ namespace LightGBM { * \brief GPU-based parallel learning algorithm. */ class GPUTreeLearner: public SerialTreeLearner { -public: + public: explicit GPUTreeLearner(const Config* tree_config); ~GPUTreeLearner(); void Init(const Dataset* train_data, bool is_constant_hessian) override; @@ -57,14 +57,14 @@ class GPUTreeLearner: public SerialTreeLearner { use_bagging_ = false; } -protected: + protected: void BeforeTrain() override; bool BeforeFindBestSplit(const Tree* tree, int left_leaf, int right_leaf) override; void FindBestSplits() override; void Split(Tree* tree, int best_Leaf, int* left_leaf, int* right_leaf) override; void ConstructHistograms(const std::vector& is_feature_used, bool use_subtract) override; -private: + private: /*! \brief 4-byte feature tuple used by GPU kernels */ struct Feature4 { uint8_t s[4]; @@ -269,7 +269,7 @@ class GPUTreeLearner: public SerialTreeLearner { namespace LightGBM { class GPUTreeLearner: public SerialTreeLearner { -public: + public: #pragma warning(disable : 4702) explicit GPUTreeLearner(const Config* tree_config) : SerialTreeLearner(tree_config) { Log::Fatal("GPU Tree Learner was not enabled in this build.\n" diff --git a/src/treelearner/leaf_splits.hpp b/src/treelearner/leaf_splits.hpp index c629289d2d1..24863a34186 100644 --- a/src/treelearner/leaf_splits.hpp +++ b/src/treelearner/leaf_splits.hpp @@ -14,7 +14,7 @@ namespace LightGBM { * \brief used to find split candidates for a leaf */ class LeafSplits { -public: + public: LeafSplits(data_size_t num_data) :num_data_in_leaf_(num_data), num_data_(num_data), data_indices_(nullptr) { @@ -141,7 +141,7 @@ class LeafSplits { const data_size_t* data_indices() const { return data_indices_; } -private: + private: /*! \brief current leaf index */ int leaf_index_; /*! 
\brief number of data on current leaf */ diff --git a/src/treelearner/parallel_tree_learner.h b/src/treelearner/parallel_tree_learner.h index cfc19f14e75..e9a96b1a806 100644 --- a/src/treelearner/parallel_tree_learner.h +++ b/src/treelearner/parallel_tree_learner.h @@ -20,15 +20,16 @@ namespace LightGBM { */ template class FeatureParallelTreeLearner: public TREELEARNER_T { -public: + public: explicit FeatureParallelTreeLearner(const Config* config); ~FeatureParallelTreeLearner(); void Init(const Dataset* train_data, bool is_constant_hessian) override; -protected: + protected: void BeforeTrain() override; void FindBestSplitsFromHistograms(const std::vector& is_feature_used, bool use_subtract) override; -private: + + private: /*! \brief rank of local machine */ int rank_; /*! \brief Number of machines of this parallel task */ @@ -46,13 +47,13 @@ class FeatureParallelTreeLearner: public TREELEARNER_T { */ template class DataParallelTreeLearner: public TREELEARNER_T { -public: + public: explicit DataParallelTreeLearner(const Config* config); ~DataParallelTreeLearner(); void Init(const Dataset* train_data, bool is_constant_hessian) override; void ResetConfig(const Config* config) override; -protected: + protected: void BeforeTrain() override; void FindBestSplits() override; void FindBestSplitsFromHistograms(const std::vector& is_feature_used, bool use_subtract) override; @@ -66,7 +67,7 @@ class DataParallelTreeLearner: public TREELEARNER_T { } } -private: + private: /*! \brief Rank of local machine */ int rank_; /*! \brief Number of machines of this parallel task */ @@ -100,13 +101,13 @@ class DataParallelTreeLearner: public TREELEARNER_T { */ template class VotingParallelTreeLearner: public TREELEARNER_T { -public: + public: explicit VotingParallelTreeLearner(const Config* config); ~VotingParallelTreeLearner() { } void Init(const Dataset* train_data, bool is_constant_hessian) override; void ResetConfig(const Config* config) override; -protected: + protected: void BeforeTrain() override; bool BeforeFindBestSplit(const Tree* tree, int left_leaf, int right_leaf) override; void FindBestSplits() override; @@ -136,7 +137,7 @@ class VotingParallelTreeLearner: public TREELEARNER_T { void CopyLocalHistogram(const std::vector& smaller_top_features, const std::vector& larger_top_features); -private: + private: /*! \brief Tree config used in local mode */ Config local_config_; /*! \brief Voting size */ diff --git a/src/treelearner/serial_tree_learner.h b/src/treelearner/serial_tree_learner.h index 098774d8cb1..2d1c7a72898 100644 --- a/src/treelearner/serial_tree_learner.h +++ b/src/treelearner/serial_tree_learner.h @@ -32,7 +32,7 @@ namespace LightGBM { * \brief Used for learning a tree by single machine */ class SerialTreeLearner: public TreeLearner { -public: + public: explicit SerialTreeLearner(const Config* config); ~SerialTreeLearner(); @@ -75,7 +75,7 @@ class SerialTreeLearner: public TreeLearner { void RenewTreeOutput(Tree* tree, const ObjectiveFunction* obj, double prediction, data_size_t total_num_data, const data_size_t* bag_indices, data_size_t bag_cnt) const override; -protected: + protected: /*! * \brief Some initial works before training */ diff --git a/src/treelearner/split_info.hpp b/src/treelearner/split_info.hpp index 0927f64a4e4..303f1a126aa 100644 --- a/src/treelearner/split_info.hpp +++ b/src/treelearner/split_info.hpp @@ -15,7 +15,7 @@ namespace LightGBM { * \brief Used to store some information for gain split point */ struct SplitInfo { -public: + public: /*! 
\brief Feature index */ int feature = -1; /*! \brief Split threshold */ @@ -188,7 +188,7 @@ struct SplitInfo { }; struct LightSplitInfo { -public: + public: /*! \brief Feature index */ int feature = -1; /*! \brief Split gain */
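
For reference, the change applied throughout this patch is purely a whitespace fix: every `public:`, `protected:`, and `private:` access specifier is re-indented from column 0 to a one-space indent inside the class body, which is the convention cpplint checks for under the Google C++ style guide (the style LightGBM appears to follow here). The snippet below is a hypothetical class, not taken from the LightGBM sources, sketching the indentation the patch enforces.

```cpp
#include <vector>

// Hypothetical example illustrating the one-space indentation of access
// specifiers that this patch applies across the LightGBM headers/sources.
class Example {
 public:                        // indented by exactly one space
  Example() = default;
  void Add(double value) { values_.push_back(value); }

 protected:                     // same one-space indent for protected members
  std::vector<double> values_;

 private:                       // and for private members
  int counter_ = 0;
};
```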