Merge pull request #296 from gAldeia/bug_fixes
Bug fixes in verbosity and logfile
lacava committed Nov 18, 2023
2 parents 51816f4 + b5bc524 commit 459e021
Showing 8 changed files with 70 additions and 40 deletions.
2 changes: 1 addition & 1 deletion configure
@@ -38,7 +38,7 @@ elif [ "$1" == "lpc_cuda" ] ; then
BUILD_DIR="buildGPU"
OMP=OFF
EXTRA_FLAGS="-DCORE_USE_CUDA=ON"
elif [ "$1" == "tests" || "$1" == "test" ] ; then
elif [[ "$1" == "tests" || "$1" == "test" ]] ; then
EXTRA_FLAGS="-DGTEST=ON"
elif [ "$1" == "gpu" ] ; then
BUILD_DIR="buildGPU"
7 changes: 5 additions & 2 deletions feat/feat.py
@@ -52,9 +52,9 @@ class Feat(BaseEstimator):
'a': all
'b': boolean only
'f': floating point only
-    functions: string, optional (default: "")
+    functions: list[string], optional (default: [])
        A comma-separated string of operators to use to build features.
-        If functions="", all the available functions are used.
+        If functions=[], all the available functions are used.
Options: +, -, *, /, ^2, ^3, sqrt, sin, cos, exp, log, ^, logit, tanh,
gauss, relu, split, split_c, b2f, c2f, and, or, not, xor, =, <, <=, >,
>=, if, ite
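
For illustration, a minimal sketch of the updated functions argument in use, assuming the installed Python package exposes the Feat estimator defined in this file (the operator subset chosen below is arbitrary):

from feat import Feat  # assumes the installed package exposes the Feat estimator from feat/feat.py

# Operators are passed as a list of names (per the updated docstring),
# not as a comma-separated string.
est = Feat(functions=["+", "-", "*", "/", "sqrt", "log"])

# An empty list keeps the default behavior: all available operators are used.
est_all = Feat(functions=[])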
@@ -398,6 +398,9 @@ def _check_shape(self, X):
def get_representation(self): return self.cfeat_.get_representation()
def get_model(self, sort=True): return self.cfeat_.get_model(sort)
def get_coefs(self): return self.cfeat_.get_coefs()
+    def get_n_params(self): return self.cfeat_.get_n_params()
+    def get_dim(self): return self.cfeat_.get_dim()
+    def get_n_nodes(self): return self.cfeat_.get_n_nodes()

class FeatRegressor(Feat):
"""Convenience method that enforces regression options."""
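The three accessors added above are forwarded to the C++ object (and exposed through pybind in src/pybind.cc further down). A usage sketch, assuming the scikit-learn-style fit API that Feat advertises; the toy data and printed labels are illustrative only:

import numpy as np
from feat import Feat  # assumes the installed package exposes the Feat estimator from feat/feat.py

X = np.random.rand(100, 5)
y = X[:, 0] + 2 * X[:, 1]   # toy regression target, illustrative only

est = Feat(functions=["+", "-", "*"])
est.fit(X, y)

# The new accessors report the size of the learned representation.
print("n_params:", est.get_n_params())  # number of learned parameters
print("dim:", est.get_dim())            # dimensionality of the final representation
print("n_nodes:", est.get_n_nodes())    # total nodes across the feature programs
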
71 changes: 41 additions & 30 deletions src/feat.cc
@@ -716,6 +716,13 @@ void Feat::run_generation(unsigned int g,
if (params.max_stall > 0)
update_stall_count(stall_count, updated_best);

+        if ( (use_arch || params.verbosity>1) || !logfile.empty()) {
+            // set objectives to make sure they are reported in log/verbose/arch
+            #pragma omp parallel for
+            for (unsigned int i=0; i<pop.size(); ++i)
+                pop.individuals.at(i).set_obj(params.objectives);
+        }

logger.log("update archive...",2);
if (use_arch)
archive.update(pop,params);
@@ -725,6 +732,9 @@
else if(params.verbosity == 1)
printProgress(fraction);

+        if (!logfile.empty())
+            log_stats(log);

if (save_pop > 1)
pop.save(this->logfile+".pop.gen" +
to_string(params.current_gen) + ".json");
@@ -1357,7 +1367,8 @@ void Feat::calculate_stats(const DataRef& d)
ArrayXf Complexities(this->pop.size());
i = 0;
for (auto& p : this->pop.individuals)
    {
+        // Calculate to assure it gets reported in stats (even if it's not used as an obj)
Complexities(i) = p.get_complexity();
++i;
}
@@ -1520,37 +1531,37 @@ void Feat::print_stats(std::ofstream& log, float fraction)
}

std::cout <<"\n\n";

-    if (!logfile.empty())
-    {
-        // print stats in tabular format
-        string sep = ",";
-        if (params.current_gen == 0) // print header
-        {
-            log << "generation" << sep
-                << "time" << sep
-                << "min_loss" << sep
-                << "min_loss_val" << sep
-                << "med_loss" << sep
-                << "med_loss_val" << sep
-                << "med_size" << sep
-                << "med_complexity" << sep
-                << "med_num_params" << sep
-                << "med_dim\n";
-        }
-    }
-
-    log << params.current_gen << sep
-        << timer.Elapsed().count() << sep
-        << stats.min_loss.back() << sep
-        << this->min_loss_v << sep
-        << stats.med_loss.back() << sep
-        << stats.med_loss_v.back() << sep
-        << stats.med_size.back() << sep
-        << stats.med_complexity.back() << sep
-        << stats.med_num_params.back() << sep
-        << stats.med_dim.back() << "\n";
-}
+void Feat::log_stats(std::ofstream& log)
+{
+    // print stats in tabular format
+    string sep = ",";
+    if (params.current_gen == 0) // print header
+    {
+        log << "generation" << sep
+            << "time" << sep
+            << "min_loss" << sep
+            << "min_loss_val" << sep
+            << "med_loss" << sep
+            << "med_loss_val" << sep
+            << "med_size" << sep
+            << "med_complexity" << sep
+            << "med_num_params" << sep
+            << "med_dim" << "\n";
+    }
+    log << params.current_gen << sep
+        << timer.Elapsed().count() << sep
+        << stats.min_loss.back() << sep
+        << this->min_loss_v << sep
+        << stats.med_loss.back() << sep
+        << stats.med_loss_v.back() << sep
+        << stats.med_size.back() << sep
+        << stats.med_complexity.back() << sep
+        << stats.med_num_params.back() << sep
+        << stats.med_dim.back() << "\n";
+}

//TODO: replace these with json
json Feat::get_stats()
{
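With this refactor, per-generation statistics are written only when a logfile is set: run_generation now guards the call to the new log_stats, which emits one comma-separated row per generation under the header shown above. A sketch of inspecting such a log from Python; the logfile and verbosity keyword names mirror the C++ members touched in this commit but are assumptions about the wrapper's exact spelling, and pandas is used only for convenience:

import numpy as np
import pandas as pd
from feat import Feat  # assumes the installed package exposes the Feat estimator from feat/feat.py

X = np.random.rand(100, 5)
y = X[:, 0] - X[:, 2]   # toy target, illustrative only

# Keyword names below are assumed; check the estimator docstring for the exact spelling.
est = Feat(logfile="feat_run.log", verbosity=1)
est.fit(X, y)

# log_stats writes one row per generation with these columns:
# generation, time, min_loss, min_loss_val, med_loss, med_loss_val,
# med_size, med_complexity, med_num_params, med_dim
stats = pd.read_csv("feat_run.log")
print(stats[["generation", "min_loss", "med_complexity"]].tail())
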
3 changes: 2 additions & 1 deletion src/feat.h
@@ -433,7 +433,8 @@ class Feat
void calculate_stats(const DataRef& d);
void print_stats(std::ofstream& log,
float fraction);

+    void log_stats(std::ofstream& log);

// gets weights via univariate initial models
vector<float> univariate_initial_model(DataRef &d, int n_feats);
/// method to fit inital ml model
4 changes: 0 additions & 4 deletions src/pop/archive.cc
@@ -78,10 +78,6 @@ namespace FT{

vector<Individual> tmp = pop.individuals;

-            #pragma omp parallel for
-            for (unsigned int i=0; i<tmp.size(); ++i)
-                tmp.at(i).set_obj(params.objectives);

for (const auto& p : individuals)
tmp.push_back(p);

3 changes: 3 additions & 0 deletions src/pybind.cc
@@ -149,6 +149,9 @@ PYBIND11_MODULE(_feat, m)
.def("save", &Feat::save)
.def("load", &Feat::load)
.def("get_representation", &Feat::get_representation)
.def("get_n_params", &Feat::get_n_params)
.def("get_dim", &Feat::get_dim)
.def("get_n_nodes", &Feat::get_n_nodes)
.def("get_model", &Feat::get_model, py::arg("sort") = true)
.def("get_eqn", &Feat::get_eqn, py::arg("sort") = true)
;
2 changes: 1 addition & 1 deletion src/sel/tournament.cc
@@ -13,7 +13,7 @@ namespace FT{
* @class Tournament
*/

-    Tournament::Tournament(bool surv){ name = "nsga2"; survival = surv; };
+    Tournament::Tournament(bool surv){ name = "tournament"; survival = surv; };

Tournament::~Tournament(){}

18 changes: 17 additions & 1 deletion tests/selectionTests.cc
@@ -1,10 +1,22 @@
#include "testsHeader.h"

-TEST(Selection, SelectionOperator)
+class SelectionTest : public testing::TestWithParam<std::string> {
+protected:
+    void SetUp() override {
+        selectionType = GetParam();
+    }
+
+    std::string selectionType;
+};
+
+TEST_P(SelectionTest, SelectionOperator)
{
Feat feat = make_estimator(100, 100, "LinearRidgeRegression", false, 1, 666);
feat.set_scorer("mae");

+    feat.set_selection(selectionType);
+    ASSERT_STREQ(selectionType.c_str(), feat.selector.get_type().c_str());

MatrixXf X(7,2);
X << 0,1,
0.47942554,0.87758256,
@@ -58,3 +70,7 @@ TEST(Selection, SelectionOperator)

ASSERT_EQ(parents.size(), feat.get_pop_size());
}

+INSTANTIATE_TEST_SUITE_P(AllSelectionTypes, SelectionTest,
+    testing::Values("lexicase", "fair_lexicase", "simanneal", "tournament",
+        "offspring", "random", "nsga2"));
