Skip to content

Commit

Permalink
Update base test class for f distribution
Browse files Browse the repository at this point in the history
  • Loading branch information
aherbert committed Sep 30, 2021
1 parent 366697c commit 2fab0f2
Show file tree
Hide file tree
Showing 2 changed files with 69 additions and 100 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,147 +17,78 @@
package org.apache.commons.statistics.distribution;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;

/**
* Test cases for FDistribution.
* Extends ContinuousDistributionAbstractTest. See class javadoc for
* ContinuousDistributionAbstractTest for details.
* Test cases for {@link FDistribution}.
* Extends {@link BaseContinuousDistributionTest}. See javadoc of that class for details.
*/
class FDistributionTest extends ContinuousDistributionAbstractTest {

//---------------------- Override tolerance --------------------------------

@BeforeEach
void customSetUp() {
setTolerance(1e-9);
}

//-------------- Implementations for abstract methods ----------------------

@Override
public FDistribution makeDistribution() {
return new FDistribution(5.0, 6.0);
}

@Override
public double[] makeCumulativeTestPoints() {
// quantiles computed using R version 2.9.2
return new double[] {0.0346808448626, 0.0937009113303, 0.143313661184, 0.202008445998, 0.293728320107,
20.8026639595, 8.74589525602, 5.98756512605, 4.38737418741, 3.10751166664};
}

@Override
public double[] makeCumulativeTestValues() {
return new double[] {0.001, 0.01, 0.025, 0.05, 0.1, 0.999, 0.990, 0.975, 0.950, 0.900};
}

class FDistributionTest extends BaseContinuousDistributionTest {
@Override
public double[] makeDensityTestValues() {
return new double[] {0.0689156576706, 0.236735653193, 0.364074131941, 0.481570789649, 0.595880479994,
0.000133443915657, 0.00286681303403, 0.00969192007502, 0.0242883861471, 0.0605491314658};
ContinuousDistribution makeDistribution(Object... parameters) {
final double df1 = (Double) parameters[0];
final double df2 = (Double) parameters[1];
return new FDistribution(df1, df2);
}

@Override
public double[] makeCumulativePrecisionTestPoints() {
return new double[] {1e-7, 4e-8, 9e-8};
protected double getTolerance() {
return 1e-9;
}

@Override
public double[] makeCumulativePrecisionTestValues() {
// These were created using WolframAlpha
return new double[] {1.578691625481747e-17, 1.597523916857153e-18, 1.2131195257872846e-17};
Object[][] makeInvalidParameters() {
return new Object[][] {
{0.0, 1.0},
{1.0, 0.0},
};
}

@Override
public double[] makeSurvivalPrecisionTestPoints() {
return new double[] {1e6, 42e5, 63e5};
}

@Override
public double[] makeSurvivalPrecisionTestValues() {
// These were created using WolframAlpha
return new double[] {1.1339943867175144e-17, 1.5306104409634358e-19, 4.535143828961954e-20};
String[] getParameterNames() {
return new String[] {"NumeratorDegreesOfFreedom", "DenominatorDegreesOfFreedom"};
}

//-------------------- Additional test cases -------------------------------

@Test
void testCumulativeProbabilityExtremes() {
setDistribution(makeDistribution());
setCumulativeTestPoints(new double[] {-2, 0});
setCumulativeTestValues(new double[] {0, 0});
verifyCumulativeProbabilities();
}

@Test
void testInverseCumulativeProbabilityExtremes() {
setDistribution(makeDistribution());
setInverseCumulativeTestPoints(new double[] {0, 1});
setInverseCumulativeTestValues(new double[] {0, Double.POSITIVE_INFINITY});
verifyInverseCumulativeProbabilities();
}

@ParameterizedTest
@CsvSource({
"11, 12",
"101, 400",
})
void testParameterAccessors(double df1, double df2) {
final FDistribution dist = new FDistribution(df1, df2);
Assertions.assertEquals(df1, dist.getNumeratorDegreesOfFreedom());
Assertions.assertEquals(df2, dist.getDenominatorDegreesOfFreedom());
}

@ParameterizedTest
@CsvSource({
"0, 1",
"1, 0",
})
void testConstructorPreconditions(double df1, double df2) {
Assertions.assertThrows(DistributionException.class, () -> new FDistribution(df1, df2));
}

@Test
void testMoments() {
final double tol = 1e-9;
void testAdditionalMoments() {
FDistribution dist;

dist = new FDistribution(1, 2);
Assertions.assertTrue(Double.isNaN(dist.getMean()));
Assertions.assertTrue(Double.isNaN(dist.getVariance()));
Assertions.assertEquals(Double.NaN, dist.getMean());
Assertions.assertEquals(Double.NaN, dist.getVariance());

dist = new FDistribution(1, 3);
Assertions.assertEquals(3d / (3d - 2d), dist.getMean(), tol);
Assertions.assertTrue(Double.isNaN(dist.getVariance()));
Assertions.assertEquals(3d / (3d - 2d), dist.getMean());
Assertions.assertEquals(Double.NaN, dist.getVariance());

dist = new FDistribution(1, 5);
Assertions.assertEquals(5d / (5d - 2d), dist.getMean(), tol);
Assertions.assertEquals((2d * 5d * 5d * 4d) / 9d, dist.getVariance(), tol);
Assertions.assertEquals(5d / (5d - 2d), dist.getMean());
Assertions.assertEquals((2d * 5d * 5d * 4d) / 9d, dist.getVariance());
}

@Test
void testLargeDegreesOfFreedom() {
final double x0 = 0.999;
final FDistribution fd = new FDistribution(100000, 100000);
final double p = fd.cumulativeProbability(.999);
final double p = fd.cumulativeProbability(x0);
final double x = fd.inverseCumulativeProbability(p);
Assertions.assertEquals(.999, x, 1.0e-5);
Assertions.assertEquals(x0, x, 1.0e-5);
}

@Test
void testSmallDegreesOfFreedom() {
final double x0 = 0.975;
FDistribution fd = new FDistribution(1, 1);
double p = fd.cumulativeProbability(0.975);
double p = fd.cumulativeProbability(x0);
double x = fd.inverseCumulativeProbability(p);
Assertions.assertEquals(0.975, x, 1.0e-5);
Assertions.assertEquals(x0, x, 1.0e-5);

fd = new FDistribution(1, 2);
p = fd.cumulativeProbability(0.975);
p = fd.cumulativeProbability(x0);
x = fd.inverseCumulativeProbability(p);
Assertions.assertEquals(0.975, x, 1.0e-5);
Assertions.assertEquals(x0, x, 1.0e-5);
}

@Test
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
parameters = 5.0 6.0
# Computed using R 2.9.2
mean = 1.5
variance = 4.05
lower = 0
cdf.points = \
0.000000000000000000 0.034680844862628298 0.093700911330275707 \
0.143313661184410718 0.202008445998125286 0.293728320106749319 \
20.802663959456705101 8.745895256019915465 5.987565126046930253 \
4.387374187406129167 3.107511666638931302 Inf
cdf.values = \
0, 0.001, 0.01, 0.025, 0.05, 0.1, 0.999,\
0.990, 0.975, 0.950, 0.900, 1
pdf.values = \
0.00000000000000000000 0.06891565767071070048 0.23673565319275807761 \
0.36407413194146687196 0.48157078964913180297 0.59588047999337290239 \
0.00013344391565753327 0.00286681303402549050 0.00969192007503559549 \
0.02428838614718745875 0.06054913146582809741 0.00000000000000000000
# Computed using WolframAlpha
cdf.hp.points = 1e-7, 4e-8, 9e-8
cdf.hp.values = 1.578691625481747e-17, 1.597523916857153e-18, 1.2131195257872846e-17
# Computed using WolframAlpha. (The F distribution is parameterized by the two degrees of freedom above; it has no lambda/rate parameter.)
sf.hp.points = 1e6, 42e5, 63e5
sf.hp.values = 1.1339943867175144e-17, 1.5306104409634358e-19, 4.535143828961954e-20

0 comments on commit 2fab0f2

Please sign in to comment.