2 changes: 2 additions & 0 deletions docs/CHANGELOG.asciidoc
@@ -42,6 +42,8 @@
 classification and regression models. (See {ml-pull}2251[#2251].)
 * Fix possible source of "x = NaN, distribution = class boost::math::normal_distribution<..."
   log errors training classification and regression models. (See {ml-pull}2249[#2249].)
+* Fix some bugs affecting the decision to stop optimising hyperparameters for training
+  classification and regression models. (See {ml-pull}2259[#2259].)
 
 == {es} version 8.2.0
 
9 changes: 5 additions & 4 deletions include/maths/common/CBayesianOptimisation.h
@@ -125,9 +125,6 @@ class MATHS_COMMON_EXPORT CBayesianOptimisation {
     //! of the total variance.
     TDoubleDoublePrVec anovaMainEffects() const;
 
-    //! Set kernel \p parameters explicitly.
-    void kernelParameters(const TVector& parameters);
-
     //! Get the memory used by this object.
     std::size_t memoryUsage() const;
 
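Reviewer note: `anovaMainEffects` is documented as returning each hyperparameter's share "of the total variance". A plausible reading, under the usual functional-ANOVA (Sobol') convention and not confirmed by this diff, is the first-order index

```latex
% Assumed definition: first-order (main effect) variance share of x_i,
% with f the Gaussian process mean over the search box.
S_i = \frac{\operatorname{Var}_{x_i}\!\left(\mathbb{E}\left[f(x) \mid x_i\right]\right)}{\operatorname{Var}\left(f(x)\right)}
```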
@@ -142,6 +139,9 @@
 
     //! \name Test Interface
     //@{
+    //! Set kernel \p parameters explicitly.
+    void kernelParameters(const TVector& parameters);
+
     //! Get minus the data likelihood and its gradient as a function of the kernel
     //! hyperparameters.
     std::pair<TLikelihoodFunc, TLikelihoodGradientFunc> minusLikelihoodAndGradient() const;
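Reviewer note: the header comment only loosely pins down what `minusLikelihoodAndGradient` returns. If it follows the standard Gaussian-process marginal likelihood (an assumption, see Rasmussen & Williams ch. 5; not confirmed by this diff), the returned pair would evaluate

```latex
% Assumed form: GP negative log marginal likelihood and its gradient,
% with K_theta the kernel matrix on the sampled points and f their values.
-\log p(f \mid \theta)
  = \tfrac{1}{2} f^{\top} K_\theta^{-1} f
  + \tfrac{1}{2} \log\det K_\theta
  + \tfrac{n}{2} \log 2\pi,
\qquad
\frac{\partial}{\partial \theta_j}\bigl[-\log p(f \mid \theta)\bigr]
  = \tfrac{1}{2} \operatorname{tr}\!\left(\bigl(K_\theta^{-1} - \alpha\alpha^{\top}\bigr)
    \frac{\partial K_\theta}{\partial \theta_j}\right),
\quad \alpha = K_\theta^{-1} f.
```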
@@ -189,8 +189,9 @@ class MATHS_COMMON_EXPORT CBayesianOptimisation {
     TVectorDoublePr kernelCovariates(const TVector& a, const TVector& x, double vx) const;
     double kernel(const TVector& a, const TVector& x, const TVector& y) const;
     TVector kinvf() const;
-    TVector transformTo01(const TVector& x) const;
     double dissimilarity(const TVector& x) const;
+    TVector to01(TVector x) const;
+    TVector from01(TVector x) const;
     void checkRestoredInvariants() const;
 
 private:
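Reviewer note: the diff replaces `transformTo01` with a `to01`/`from01` pair, which suggests an invertible map between the hyperparameter search box and the unit cube. A minimal sketch of that idea, assuming Eigen-style vectors and box bounds `a`, `b` (names and free-function signatures here are illustrative, not the library's):

```cpp
#include <Eigen/Dense>

// Map a <= x <= b componentwise into [0, 1]^d.
Eigen::VectorXd to01(const Eigen::VectorXd& x,
                     const Eigen::VectorXd& a,
                     const Eigen::VectorXd& b) {
    return (x - a).cwiseQuotient(b - a);
}

// Inverse map: recover original coordinates from a unit-cube point.
Eigen::VectorXd from01(const Eigen::VectorXd& x,
                       const Eigen::VectorXd& a,
                       const Eigen::VectorXd& b) {
    return a + x.cwiseProduct(b - a);
}
```

Working on the unit cube keeps kernel length scales and the `dissimilarity` measure comparable across hyperparameters with very different ranges.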
6 changes: 3 additions & 3 deletions lib/maths/analytics/unittest/CBoostedTreeTest.cc
@@ -569,7 +569,7 @@ BOOST_AUTO_TEST_CASE(testLinear) {
             0.0, modelBias[i][0],
             6.0 * std::sqrt(noiseVariance / static_cast<double>(trainRows)));
         // Good R^2...
-        BOOST_TEST_REQUIRE(modelRSquared[i][0] > 0.97);
+        BOOST_TEST_REQUIRE(modelRSquared[i][0] > 0.94);
 
         meanModelRSquared.add(modelRSquared[i][0]);
     }
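Reviewer note: all three relaxed assertions in this file bound the coefficient of determination,

```latex
R^2 = 1 - \frac{\sum_i \left(y_i - \hat{y}_i\right)^2}{\sum_i \left(y_i - \bar{y}\right)^2},
```

so lowering a threshold from 0.97 to 0.94 tolerates roughly twice the unexplained variance (6% rather than 3% of the total). Presumably this accommodates runs where the hyperparameter search now stops earlier; that rationale is inferred, not stated in the diff.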
@@ -879,7 +879,7 @@ BOOST_AUTO_TEST_CASE(testLowCardinalityFeatures) {
         target, noiseVariance / static_cast<double>(rows));
     LOG_DEBUG(<< "bias = " << bias << ", rSquared = " << rSquared);
 
-    BOOST_TEST_REQUIRE(rSquared > 0.96);
+    BOOST_TEST_REQUIRE(rSquared > 0.95);
 }
 
 BOOST_AUTO_TEST_CASE(testLowTrainFractionPerFold) {
@@ -1189,7 +1189,7 @@ BOOST_AUTO_TEST_CASE(testCategoricalRegressors) {
LOG_DEBUG(<< "bias = " << modelBias);
LOG_DEBUG(<< " R^2 = " << modelRSquared);
BOOST_REQUIRE_CLOSE_ABSOLUTE(0.0, modelBias, 0.1);
BOOST_TEST_REQUIRE(modelRSquared > 0.98);
BOOST_TEST_REQUIRE(modelRSquared > 0.97);
}

BOOST_AUTO_TEST_CASE(testFeatureBags) {