diff --git a/CHANGELOG.md b/CHANGELOG.md
index 72016b1a..efb11250 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,14 @@
 # Changelog
+## 15.0.0
+- Breaking changes:
+  - `CrossValidator`:
+    - `targetNames` argument removed
+    - return type changed to `Future` (previously `double`)
+  - `Assessable`, `assess` method: `targetNames` argument removed
+- Precision metric added
+- Coordinate descent optimization logic fixed: `dtype` is now taken into account
+
 ## 14.2.6
 - `injector` lib 1.0.9 supported
diff --git a/benchmark/cross_validator.dart b/benchmark/cross_validator.dart
index cec1f82c..e302e876 100644
--- a/benchmark/cross_validator.dart
+++ b/benchmark/cross_validator.dart
@@ -19,8 +19,8 @@ class CrossValidatorBenchmark extends BenchmarkBase {
   @override
   void run() {
-    crossValidator.evaluate((trainSamples, targetFeatureNames) =>
-        KnnRegressor(trainSamples, targetFeatureNames.first, 7),
+    crossValidator.evaluate((trainSamples) =>
+        KnnRegressor(trainSamples, 'col_20', 7),
         MetricType.mape);
   }
@@ -28,11 +28,9 @@ class CrossValidatorBenchmark extends BenchmarkBase {
   void setup() {
     final samples = Matrix.fromRows(List.generate(observationsNum,
         (i) => Vector.randomFilled(columnsNum)));
-    final dataFrame = DataFrame.fromMatrix(samples);
-    crossValidator = CrossValidator.kFold(dataFrame, ['col_20'],
-        numberOfFolds: 5);
+    crossValidator = CrossValidator.kFold(dataFrame, numberOfFolds: 5);
   }

   void tearDown() {}
diff --git a/e2e/datasets/advertising.csv b/e2e/datasets/advertising.csv
new file mode 100644
index 00000000..9c8c55fd
--- /dev/null
+++ b/e2e/datasets/advertising.csv
@@ -0,0 +1,201 @@
+Num,TV,Radio,Newspaper,Sales
+1 ,230.1,37.8 ,69.2 ,22.1
+2 ,44.5 ,39.3 ,45.1 ,10.4
+3 ,17.2 ,45.9 ,69.3 ,9.3
+4 ,151.5,41.3 ,58.5 ,18.5
+5 ,180.8,10.8 ,58.4 ,12.9
+6 ,8.7 ,48.9 ,75 ,7.2
+7 ,57.5 ,32.8 ,23.5 ,11.8
+8 ,120.2,19.6 ,11.6 ,13.2
+9 ,8.6 ,2.1 ,1 ,4.8
+10 ,199.8,2.6 ,21.2 ,10.6
+11 ,66.1 ,5.8 ,24.2 ,8.6
+12 ,214.7,24 ,4 ,17.4
+13 ,23.8 ,35.1 ,65.9 ,9.2
+14 ,97.5 ,7.6 ,7.2 ,9.7
+15 ,204.1,32.9 ,46 ,19
+16 ,195.4,47.7 ,52.9 ,22.4
+17 ,67.8 ,36.6 ,114 ,12.5
+18 ,281.4,39.6 ,55.8 ,24.4
+19 ,69.2 ,20.5 ,18.3 ,11.3
+20 ,147.3,23.9 ,19.1 ,14.6
+21 ,218.4,27.7 ,53.4 ,18
+22 ,237.4,5.1 ,23.5 ,12.5
+23 ,13.2 ,15.9 ,49.6 ,5.6
+24 ,228.3,16.9 ,26.2 ,15.5
+25 ,62.3 ,12.6 ,18.3 ,9.7
+26 ,262.9,3.5 ,19.5 ,12
+27 ,142.9,29.3 ,12.6 ,15
+28 ,240.1,16.7 ,22.9 ,15.9
+29 ,248.8,27.1 ,22.9 ,18.9
+30 ,70.6 ,16 ,40.8 ,10.5
+31 ,292.9,28.3 ,43.2 ,21.4
+32 ,112.9,17.4 ,38.6 ,11.9
+33 ,97.2 ,1.5 ,30 ,9.6
+34 ,265.6,20 ,0.3 ,17.4
+35 ,95.7 ,1.4 ,7.4 ,9.5
+36 ,290.7,4.1 ,8.5 ,12.8
+37 ,266.9,43.8 ,5 ,25.4
+38 ,74.7 ,49.4 ,45.7 ,14.7
+39 ,43.1 ,26.7 ,35.1 ,10.1
+40 ,228 ,37.7 ,32 ,21.5
+41 ,202.5,22.3 ,31.6 ,16.6
+42 ,177 ,33.4 ,38.7 ,17.1
+43 ,293.6,27.7 ,1.8 ,20.7
+44 ,206.9,8.4 ,26.4 ,12.9
+45 ,25.1 ,25.7 ,43.3 ,8.5
+46 ,175.1,22.5 ,31.5 ,14.9
+47 ,89.7 ,9.9 ,35.7 ,10.6
+48 ,239.9,41.5 ,18.5 ,23.2
+49 ,227.2,15.8 ,49.9 ,14.8
+50 ,66.9 ,11.7 ,36.8 ,9.7
+51 ,199.8,3.1 ,34.6 ,11.4
+52 ,100.4,9.6 ,3.6 ,10.7
+53 ,216.4,41.7 ,39.6 ,22.6
+54 ,182.6,46.2 ,58.7 ,21.2
+55 ,262.7,28.8 ,15.9 ,20.2
+56 ,198.9,49.4 ,60 ,23.7
+57 ,7.3 ,28.1 ,41.4 ,5.5
+58 ,136.2,19.2 ,16.6 ,13.2
+59 ,210.8,49.6 ,37.7 ,23.8
+60 ,210.7,29.5 ,9.3 ,18.4
+61 ,53.5 ,2 ,21.4 ,8.1
+62 ,261.3,42.7 ,54.7 ,24.2
+63 ,239.3,15.5 ,27.3 ,15.7
+64 ,102.7,29.6 ,8.4 ,14
+65 ,131.1,42.8 ,28.9 ,18
+66 ,69 ,9.3 ,0.9 ,9.3
+67 ,31.5 ,24.6 ,2.2 ,9.5
+68 ,139.3,14.5 ,10.2 ,13.4
+69 ,237.4,27.5 ,11 ,18.9
+70 ,216.8,43.9 ,27.2 ,22.3
+71 ,199.1,30.6 ,38.7 ,18.3
+72 ,109.8,14.3 ,31.7
,12.4 +73 ,26.8 ,33 ,19.3 ,8.8 +74 ,129.4,5.7 ,31.3 ,11 +75 ,213.4,24.6 ,13.1 ,17 +76 ,16.9 ,43.7 ,89.4 ,8.7 +77 ,27.5 ,1.6 ,20.7 ,6.9 +78 ,120.5,28.5 ,14.2 ,14.2 +79 ,5.4 ,29.9 ,9.4 ,5.3 +80 ,116 ,7.7 ,23.1 ,11 +81 ,76.4 ,26.7 ,22.3 ,11.8 +82 ,239.8,4.1 ,36.9 ,12.3 +83 ,75.3 ,20.3 ,32.5 ,11.3 +84 ,68.4 ,44.5 ,35.6 ,13.6 +85 ,213.5,43 ,33.8 ,21.7 +86 ,193.2,18.4 ,65.7 ,15.2 +87 ,76.3 ,27.5 ,16 ,12 +88 ,110.7,40.6 ,63.2 ,16 +89 ,88.3 ,25.5 ,73.4 ,12.9 +90 ,109.8,47.8 ,51.4 ,16.7 +91 ,134.3,4.9 ,9.3 ,11.2 +92 ,28.6 ,1.5 ,33 ,7.3 +93 ,217.7,33.5 ,59 ,19.4 +94 ,250.9,36.5 ,72.3 ,22.2 +95 ,107.4,14 ,10.9 ,11.5 +96 ,163.3,31.6 ,52.9 ,16.9 +97 ,197.6,3.5 ,5.9 ,11.7 +98 ,184.9,21 ,22 ,15.5 +99 ,289.7,42.3 ,51.2 ,25.4 +100,135.2,41.7 ,45.9 ,17.2 +101,222.4,4.3 ,49.8 ,11.7 +102,296.4,36.3 ,100.9,23.8 +103,280.2,10.1 ,21.4 ,14.8 +104,187.9,17.2 ,17.9 ,14.7 +105,238.2,34.3 ,5.3 ,20.7 +106,137.9,46.4 ,59 ,19.2 +107,25 ,11 ,29.7 ,7.2 +108,90.4 ,0.3 ,23.2 ,8.7 +109,13.1 ,0.4 ,25.6 ,5.3 +110,255.4,26.9 ,5.5 ,19.8 +111,225.8,8.2 ,56.5 ,13.4 +112,241.7,38 ,23.2 ,21.8 +113,175.7,15.4 ,2.4 ,14.1 +114,209.6,20.6 ,10.7 ,15.9 +115,78.2 ,46.8 ,34.5 ,14.6 +116,75.1 ,35 ,52.7 ,12.6 +117,139.2,14.3 ,25.6 ,12.2 +118,76.4 ,0.8 ,14.8 ,9.4 +119,125.7,36.9 ,79.2 ,15.9 +120,19.4 ,16 ,22.3 ,6.6 +121,141.3,26.8 ,46.2 ,15.5 +122,18.8 ,21.7 ,50.4 ,7 +123,224 ,2.4 ,15.6 ,11.6 +124,123.1,34.6 ,12.4 ,15.2 +125,229.5,32.3 ,74.2 ,19.7 +126,87.2 ,11.8 ,25.9 ,10.6 +127,7.8 ,38.9 ,50.6 ,6.6 +128,80.2 ,0 ,9.2 ,8.8 +129,220.3,49 ,3.2 ,24.7 +130,59.6 ,12 ,43.1 ,9.7 +131,0.7 ,39.6 ,8.7 ,1.6 +132,265.2,2.9 ,43 ,12.7 +133,8.4 ,27.2 ,2.1 ,5.7 +134,219.8,33.5 ,45.1 ,19.6 +135,36.9 ,38.6 ,65.6 ,10.8 +136,48.3 ,47 ,8.5 ,11.6 +137,25.6 ,39 ,9.3 ,9.5 +138,273.7,28.9 ,59.7 ,20.8 +139,43 ,25.9 ,20.5 ,9.6 +140,184.9,43.9 ,1.7 ,20.7 +141,73.4 ,17 ,12.9 ,10.9 +142,193.7,35.4 ,75.6 ,19.2 +143,220.5,33.2 ,37.9 ,20.1 +144,104.6,5.7 ,34.4 ,10.4 +145,96.2 ,14.8 ,38.9 ,11.4 +146,140.3,1.9 ,9 ,10.3 +147,240.1,7.3 ,8.7 ,13.2 +148,243.2,49 ,44.3 ,25.4 +149,38 ,40.3 ,11.9 ,10.9 +150,44.7 ,25.8 ,20.6 ,10.1 +151,280.7,13.9 ,37 ,16.1 +152,121 ,8.4 ,48.7 ,11.6 +153,197.6,23.3 ,14.2 ,16.6 +154,171.3,39.7 ,37.7 ,19 +155,187.8,21.1 ,9.5 ,15.6 +156,4.1 ,11.6 ,5.7 ,3.2 +157,93.9 ,43.5 ,50.5 ,15.3 +158,149.8,1.3 ,24.3 ,10.1 +159,11.7 ,36.9 ,45.2 ,7.3 +160,131.7,18.4 ,34.6 ,12.9 +161,172.5,18.1 ,30.7 ,14.4 +162,85.7 ,35.8 ,49.3 ,13.3 +163,188.4,18.1 ,25.6 ,14.9 +164,163.5,36.8 ,7.4 ,18 +165,117.2,14.7 ,5.4 ,11.9 +166,234.5,3.4 ,84.8 ,11.9 +167,17.9 ,37.6 ,21.6 ,8 +168,206.8,5.2 ,19.4 ,12.2 +169,215.4,23.6 ,57.6 ,17.1 +170,284.3,10.6 ,6.4 ,15 +171,50 ,11.6 ,18.4 ,8.4 +172,164.5,20.9 ,47.4 ,14.5 +173,19.6 ,20.1 ,17 ,7.6 +174,168.4,7.1 ,12.8 ,11.7 +175,222.4,3.4 ,13.1 ,11.5 +176,276.9,48.9 ,41.8 ,27 +177,248.4,30.2 ,20.3 ,20.2 +178,170.2,7.8 ,35.2 ,11.7 +179,276.7,2.3 ,23.7 ,11.8 +180,165.6,10 ,17.6 ,12.6 +181,156.6,2.6 ,8.3 ,10.5 +182,218.5,5.4 ,27.4 ,12.2 +183,56.2 ,5.7 ,29.7 ,8.7 +184,287.6,43 ,71.8 ,26.2 +185,253.8,21.3 ,30 ,17.6 +186,205 ,45.1 ,19.6 ,22.6 +187,139.5,2.1 ,26.6 ,10.3 +188,191.1,28.7 ,18.2 ,17.3 +189,286 ,13.9 ,3.7 ,15.9 +190,18.7 ,12.1 ,23.4 ,6.7 +191,39.5 ,41.1 ,5.8 ,10.8 +192,75.5 ,10.8 ,6 ,9.9 +193,17.2 ,4.1 ,31.6 ,5.9 +194,166.8,42 ,3.6 ,19.6 +195,149.7,35.6 ,6 ,17.3 +196,38.2 ,3.7 ,13.8 ,7.6 +197,94.2 ,4.9 ,8.1 ,9.7 +198,177 ,9.3 ,6.4 ,12.8 +199,283.6,42 ,66.2 ,25.5 +200,232.1,8.6 ,8.7 ,13.4 diff --git a/e2e/datasets/housing.csv b/e2e/datasets/housing.csv new file mode 100755 index 00000000..391fce59 --- /dev/null +++ b/e2e/datasets/housing.csv @@ 
-0,0 +1,506 @@ +0.00632 18.00 2.310 0 0.5380 6.5750 65.20 4.0900 1 296.0 15.30 396.90 4.98 24.00 +0.02731 0.00 7.070 0 0.4690 6.4210 78.90 4.9671 2 242.0 17.80 396.90 9.14 21.60 +0.02729 0.00 7.070 0 0.4690 7.1850 61.10 4.9671 2 242.0 17.80 392.83 4.03 34.70 +0.03237 0.00 2.180 0 0.4580 6.9980 45.80 6.0622 3 222.0 18.70 394.63 2.94 33.40 +0.06905 0.00 2.180 0 0.4580 7.1470 54.20 6.0622 3 222.0 18.70 396.90 5.33 36.20 +0.02985 0.00 2.180 0 0.4580 6.4300 58.70 6.0622 3 222.0 18.70 394.12 5.21 28.70 +0.08829 12.50 7.870 0 0.5240 6.0120 66.60 5.5605 5 311.0 15.20 395.60 12.43 22.90 +0.14455 12.50 7.870 0 0.5240 6.1720 96.10 5.9505 5 311.0 15.20 396.90 19.15 27.10 +0.21124 12.50 7.870 0 0.5240 5.6310 100.00 6.0821 5 311.0 15.20 386.63 29.93 16.50 +0.17004 12.50 7.870 0 0.5240 6.0040 85.90 6.5921 5 311.0 15.20 386.71 17.10 18.90 +0.22489 12.50 7.870 0 0.5240 6.3770 94.30 6.3467 5 311.0 15.20 392.52 20.45 15.00 +0.11747 12.50 7.870 0 0.5240 6.0090 82.90 6.2267 5 311.0 15.20 396.90 13.27 18.90 +0.09378 12.50 7.870 0 0.5240 5.8890 39.00 5.4509 5 311.0 15.20 390.50 15.71 21.70 +0.62976 0.00 8.140 0 0.5380 5.9490 61.80 4.7075 4 307.0 21.00 396.90 8.26 20.40 +0.63796 0.00 8.140 0 0.5380 6.0960 84.50 4.4619 4 307.0 21.00 380.02 10.26 18.20 +0.62739 0.00 8.140 0 0.5380 5.8340 56.50 4.4986 4 307.0 21.00 395.62 8.47 19.90 +1.05393 0.00 8.140 0 0.5380 5.9350 29.30 4.4986 4 307.0 21.00 386.85 6.58 23.10 +0.78420 0.00 8.140 0 0.5380 5.9900 81.70 4.2579 4 307.0 21.00 386.75 14.67 17.50 +0.80271 0.00 8.140 0 0.5380 5.4560 36.60 3.7965 4 307.0 21.00 288.99 11.69 20.20 +0.72580 0.00 8.140 0 0.5380 5.7270 69.50 3.7965 4 307.0 21.00 390.95 11.28 18.20 +1.25179 0.00 8.140 0 0.5380 5.5700 98.10 3.7979 4 307.0 21.00 376.57 21.02 13.60 +0.85204 0.00 8.140 0 0.5380 5.9650 89.20 4.0123 4 307.0 21.00 392.53 13.83 19.60 +1.23247 0.00 8.140 0 0.5380 6.1420 91.70 3.9769 4 307.0 21.00 396.90 18.72 15.20 +0.98843 0.00 8.140 0 0.5380 5.8130 100.00 4.0952 4 307.0 21.00 394.54 19.88 14.50 +0.75026 0.00 8.140 0 0.5380 5.9240 94.10 4.3996 4 307.0 21.00 394.33 16.30 15.60 +0.84054 0.00 8.140 0 0.5380 5.5990 85.70 4.4546 4 307.0 21.00 303.42 16.51 13.90 +0.67191 0.00 8.140 0 0.5380 5.8130 90.30 4.6820 4 307.0 21.00 376.88 14.81 16.60 +0.95577 0.00 8.140 0 0.5380 6.0470 88.80 4.4534 4 307.0 21.00 306.38 17.28 14.80 +0.77299 0.00 8.140 0 0.5380 6.4950 94.40 4.4547 4 307.0 21.00 387.94 12.80 18.40 +1.00245 0.00 8.140 0 0.5380 6.6740 87.30 4.2390 4 307.0 21.00 380.23 11.98 21.00 +1.13081 0.00 8.140 0 0.5380 5.7130 94.10 4.2330 4 307.0 21.00 360.17 22.60 12.70 +1.35472 0.00 8.140 0 0.5380 6.0720 100.00 4.1750 4 307.0 21.00 376.73 13.04 14.50 +1.38799 0.00 8.140 0 0.5380 5.9500 82.00 3.9900 4 307.0 21.00 232.60 27.71 13.20 +1.15172 0.00 8.140 0 0.5380 5.7010 95.00 3.7872 4 307.0 21.00 358.77 18.35 13.10 +1.61282 0.00 8.140 0 0.5380 6.0960 96.90 3.7598 4 307.0 21.00 248.31 20.34 13.50 +0.06417 0.00 5.960 0 0.4990 5.9330 68.20 3.3603 5 279.0 19.20 396.90 9.68 18.90 +0.09744 0.00 5.960 0 0.4990 5.8410 61.40 3.3779 5 279.0 19.20 377.56 11.41 20.00 +0.08014 0.00 5.960 0 0.4990 5.8500 41.50 3.9342 5 279.0 19.20 396.90 8.77 21.00 +0.17505 0.00 5.960 0 0.4990 5.9660 30.20 3.8473 5 279.0 19.20 393.43 10.13 24.70 +0.02763 75.00 2.950 0 0.4280 6.5950 21.80 5.4011 3 252.0 18.30 395.63 4.32 30.80 +0.03359 75.00 2.950 0 0.4280 7.0240 15.80 5.4011 3 252.0 18.30 395.62 1.98 34.90 +0.12744 0.00 6.910 0 0.4480 6.7700 2.90 5.7209 3 233.0 17.90 385.41 4.84 26.60 +0.14150 0.00 6.910 0 0.4480 6.1690 6.60 5.7209 3 233.0 17.90 383.37 5.81 25.30 +0.15936 0.00 
6.910 0 0.4480 6.2110 6.50 5.7209 3 233.0 17.90 394.46 7.44 24.70 +0.12269 0.00 6.910 0 0.4480 6.0690 40.00 5.7209 3 233.0 17.90 389.39 9.55 21.20 +0.17142 0.00 6.910 0 0.4480 5.6820 33.80 5.1004 3 233.0 17.90 396.90 10.21 19.30 +0.18836 0.00 6.910 0 0.4480 5.7860 33.30 5.1004 3 233.0 17.90 396.90 14.15 20.00 +0.22927 0.00 6.910 0 0.4480 6.0300 85.50 5.6894 3 233.0 17.90 392.74 18.80 16.60 +0.25387 0.00 6.910 0 0.4480 5.3990 95.30 5.8700 3 233.0 17.90 396.90 30.81 14.40 +0.21977 0.00 6.910 0 0.4480 5.6020 62.00 6.0877 3 233.0 17.90 396.90 16.20 19.40 +0.08873 21.00 5.640 0 0.4390 5.9630 45.70 6.8147 4 243.0 16.80 395.56 13.45 19.70 +0.04337 21.00 5.640 0 0.4390 6.1150 63.00 6.8147 4 243.0 16.80 393.97 9.43 20.50 +0.05360 21.00 5.640 0 0.4390 6.5110 21.10 6.8147 4 243.0 16.80 396.90 5.28 25.00 +0.04981 21.00 5.640 0 0.4390 5.9980 21.40 6.8147 4 243.0 16.80 396.90 8.43 23.40 +0.01360 75.00 4.000 0 0.4100 5.8880 47.60 7.3197 3 469.0 21.10 396.90 14.80 18.90 +0.01311 90.00 1.220 0 0.4030 7.2490 21.90 8.6966 5 226.0 17.90 395.93 4.81 35.40 +0.02055 85.00 0.740 0 0.4100 6.3830 35.70 9.1876 2 313.0 17.30 396.90 5.77 24.70 +0.01432 100.00 1.320 0 0.4110 6.8160 40.50 8.3248 5 256.0 15.10 392.90 3.95 31.60 +0.15445 25.00 5.130 0 0.4530 6.1450 29.20 7.8148 8 284.0 19.70 390.68 6.86 23.30 +0.10328 25.00 5.130 0 0.4530 5.9270 47.20 6.9320 8 284.0 19.70 396.90 9.22 19.60 +0.14932 25.00 5.130 0 0.4530 5.7410 66.20 7.2254 8 284.0 19.70 395.11 13.15 18.70 +0.17171 25.00 5.130 0 0.4530 5.9660 93.40 6.8185 8 284.0 19.70 378.08 14.44 16.00 +0.11027 25.00 5.130 0 0.4530 6.4560 67.80 7.2255 8 284.0 19.70 396.90 6.73 22.20 +0.12650 25.00 5.130 0 0.4530 6.7620 43.40 7.9809 8 284.0 19.70 395.58 9.50 25.00 +0.01951 17.50 1.380 0 0.4161 7.1040 59.50 9.2229 3 216.0 18.60 393.24 8.05 33.00 +0.03584 80.00 3.370 0 0.3980 6.2900 17.80 6.6115 4 337.0 16.10 396.90 4.67 23.50 +0.04379 80.00 3.370 0 0.3980 5.7870 31.10 6.6115 4 337.0 16.10 396.90 10.24 19.40 +0.05789 12.50 6.070 0 0.4090 5.8780 21.40 6.4980 4 345.0 18.90 396.21 8.10 22.00 +0.13554 12.50 6.070 0 0.4090 5.5940 36.80 6.4980 4 345.0 18.90 396.90 13.09 17.40 +0.12816 12.50 6.070 0 0.4090 5.8850 33.00 6.4980 4 345.0 18.90 396.90 8.79 20.90 +0.08826 0.00 10.810 0 0.4130 6.4170 6.60 5.2873 4 305.0 19.20 383.73 6.72 24.20 +0.15876 0.00 10.810 0 0.4130 5.9610 17.50 5.2873 4 305.0 19.20 376.94 9.88 21.70 +0.09164 0.00 10.810 0 0.4130 6.0650 7.80 5.2873 4 305.0 19.20 390.91 5.52 22.80 +0.19539 0.00 10.810 0 0.4130 6.2450 6.20 5.2873 4 305.0 19.20 377.17 7.54 23.40 +0.07896 0.00 12.830 0 0.4370 6.2730 6.00 4.2515 5 398.0 18.70 394.92 6.78 24.10 +0.09512 0.00 12.830 0 0.4370 6.2860 45.00 4.5026 5 398.0 18.70 383.23 8.94 21.40 +0.10153 0.00 12.830 0 0.4370 6.2790 74.50 4.0522 5 398.0 18.70 373.66 11.97 20.00 +0.08707 0.00 12.830 0 0.4370 6.1400 45.80 4.0905 5 398.0 18.70 386.96 10.27 20.80 +0.05646 0.00 12.830 0 0.4370 6.2320 53.70 5.0141 5 398.0 18.70 386.40 12.34 21.20 +0.08387 0.00 12.830 0 0.4370 5.8740 36.60 4.5026 5 398.0 18.70 396.06 9.10 20.30 +0.04113 25.00 4.860 0 0.4260 6.7270 33.50 5.4007 4 281.0 19.00 396.90 5.29 28.00 +0.04462 25.00 4.860 0 0.4260 6.6190 70.40 5.4007 4 281.0 19.00 395.63 7.22 23.90 +0.03659 25.00 4.860 0 0.4260 6.3020 32.20 5.4007 4 281.0 19.00 396.90 6.72 24.80 +0.03551 25.00 4.860 0 0.4260 6.1670 46.70 5.4007 4 281.0 19.00 390.64 7.51 22.90 +0.05059 0.00 4.490 0 0.4490 6.3890 48.00 4.7794 3 247.0 18.50 396.90 9.62 23.90 +0.05735 0.00 4.490 0 0.4490 6.6300 56.10 4.4377 3 247.0 18.50 392.30 6.53 26.60 +0.05188 0.00 4.490 0 0.4490 6.0150 45.10 
4.4272 3 247.0 18.50 395.99 12.86 22.50 +0.07151 0.00 4.490 0 0.4490 6.1210 56.80 3.7476 3 247.0 18.50 395.15 8.44 22.20 +0.05660 0.00 3.410 0 0.4890 7.0070 86.30 3.4217 2 270.0 17.80 396.90 5.50 23.60 +0.05302 0.00 3.410 0 0.4890 7.0790 63.10 3.4145 2 270.0 17.80 396.06 5.70 28.70 +0.04684 0.00 3.410 0 0.4890 6.4170 66.10 3.0923 2 270.0 17.80 392.18 8.81 22.60 +0.03932 0.00 3.410 0 0.4890 6.4050 73.90 3.0921 2 270.0 17.80 393.55 8.20 22.00 +0.04203 28.00 15.040 0 0.4640 6.4420 53.60 3.6659 4 270.0 18.20 395.01 8.16 22.90 +0.02875 28.00 15.040 0 0.4640 6.2110 28.90 3.6659 4 270.0 18.20 396.33 6.21 25.00 +0.04294 28.00 15.040 0 0.4640 6.2490 77.30 3.6150 4 270.0 18.20 396.90 10.59 20.60 +0.12204 0.00 2.890 0 0.4450 6.6250 57.80 3.4952 2 276.0 18.00 357.98 6.65 28.40 +0.11504 0.00 2.890 0 0.4450 6.1630 69.60 3.4952 2 276.0 18.00 391.83 11.34 21.40 +0.12083 0.00 2.890 0 0.4450 8.0690 76.00 3.4952 2 276.0 18.00 396.90 4.21 38.70 +0.08187 0.00 2.890 0 0.4450 7.8200 36.90 3.4952 2 276.0 18.00 393.53 3.57 43.80 +0.06860 0.00 2.890 0 0.4450 7.4160 62.50 3.4952 2 276.0 18.00 396.90 6.19 33.20 +0.14866 0.00 8.560 0 0.5200 6.7270 79.90 2.7778 5 384.0 20.90 394.76 9.42 27.50 +0.11432 0.00 8.560 0 0.5200 6.7810 71.30 2.8561 5 384.0 20.90 395.58 7.67 26.50 +0.22876 0.00 8.560 0 0.5200 6.4050 85.40 2.7147 5 384.0 20.90 70.80 10.63 18.60 +0.21161 0.00 8.560 0 0.5200 6.1370 87.40 2.7147 5 384.0 20.90 394.47 13.44 19.30 +0.13960 0.00 8.560 0 0.5200 6.1670 90.00 2.4210 5 384.0 20.90 392.69 12.33 20.10 +0.13262 0.00 8.560 0 0.5200 5.8510 96.70 2.1069 5 384.0 20.90 394.05 16.47 19.50 +0.17120 0.00 8.560 0 0.5200 5.8360 91.90 2.2110 5 384.0 20.90 395.67 18.66 19.50 +0.13117 0.00 8.560 0 0.5200 6.1270 85.20 2.1224 5 384.0 20.90 387.69 14.09 20.40 +0.12802 0.00 8.560 0 0.5200 6.4740 97.10 2.4329 5 384.0 20.90 395.24 12.27 19.80 +0.26363 0.00 8.560 0 0.5200 6.2290 91.20 2.5451 5 384.0 20.90 391.23 15.55 19.40 +0.10793 0.00 8.560 0 0.5200 6.1950 54.40 2.7778 5 384.0 20.90 393.49 13.00 21.70 +0.10084 0.00 10.010 0 0.5470 6.7150 81.60 2.6775 6 432.0 17.80 395.59 10.16 22.80 +0.12329 0.00 10.010 0 0.5470 5.9130 92.90 2.3534 6 432.0 17.80 394.95 16.21 18.80 +0.22212 0.00 10.010 0 0.5470 6.0920 95.40 2.5480 6 432.0 17.80 396.90 17.09 18.70 +0.14231 0.00 10.010 0 0.5470 6.2540 84.20 2.2565 6 432.0 17.80 388.74 10.45 18.50 +0.17134 0.00 10.010 0 0.5470 5.9280 88.20 2.4631 6 432.0 17.80 344.91 15.76 18.30 +0.13158 0.00 10.010 0 0.5470 6.1760 72.50 2.7301 6 432.0 17.80 393.30 12.04 21.20 +0.15098 0.00 10.010 0 0.5470 6.0210 82.60 2.7474 6 432.0 17.80 394.51 10.30 19.20 +0.13058 0.00 10.010 0 0.5470 5.8720 73.10 2.4775 6 432.0 17.80 338.63 15.37 20.40 +0.14476 0.00 10.010 0 0.5470 5.7310 65.20 2.7592 6 432.0 17.80 391.50 13.61 19.30 +0.06899 0.00 25.650 0 0.5810 5.8700 69.70 2.2577 2 188.0 19.10 389.15 14.37 22.00 +0.07165 0.00 25.650 0 0.5810 6.0040 84.10 2.1974 2 188.0 19.10 377.67 14.27 20.30 +0.09299 0.00 25.650 0 0.5810 5.9610 92.90 2.0869 2 188.0 19.10 378.09 17.93 20.50 +0.15038 0.00 25.650 0 0.5810 5.8560 97.00 1.9444 2 188.0 19.10 370.31 25.41 17.30 +0.09849 0.00 25.650 0 0.5810 5.8790 95.80 2.0063 2 188.0 19.10 379.38 17.58 18.80 +0.16902 0.00 25.650 0 0.5810 5.9860 88.40 1.9929 2 188.0 19.10 385.02 14.81 21.40 +0.38735 0.00 25.650 0 0.5810 5.6130 95.60 1.7572 2 188.0 19.10 359.29 27.26 15.70 +0.25915 0.00 21.890 0 0.6240 5.6930 96.00 1.7883 4 437.0 21.20 392.11 17.19 16.20 +0.32543 0.00 21.890 0 0.6240 6.4310 98.80 1.8125 4 437.0 21.20 396.90 15.39 18.00 +0.88125 0.00 21.890 0 0.6240 5.6370 94.70 1.9799 4 437.0 
21.20 396.90 18.34 14.30 +0.34006 0.00 21.890 0 0.6240 6.4580 98.90 2.1185 4 437.0 21.20 395.04 12.60 19.20 +1.19294 0.00 21.890 0 0.6240 6.3260 97.70 2.2710 4 437.0 21.20 396.90 12.26 19.60 +0.59005 0.00 21.890 0 0.6240 6.3720 97.90 2.3274 4 437.0 21.20 385.76 11.12 23.00 +0.32982 0.00 21.890 0 0.6240 5.8220 95.40 2.4699 4 437.0 21.20 388.69 15.03 18.40 +0.97617 0.00 21.890 0 0.6240 5.7570 98.40 2.3460 4 437.0 21.20 262.76 17.31 15.60 +0.55778 0.00 21.890 0 0.6240 6.3350 98.20 2.1107 4 437.0 21.20 394.67 16.96 18.10 +0.32264 0.00 21.890 0 0.6240 5.9420 93.50 1.9669 4 437.0 21.20 378.25 16.90 17.40 +0.35233 0.00 21.890 0 0.6240 6.4540 98.40 1.8498 4 437.0 21.20 394.08 14.59 17.10 +0.24980 0.00 21.890 0 0.6240 5.8570 98.20 1.6686 4 437.0 21.20 392.04 21.32 13.30 +0.54452 0.00 21.890 0 0.6240 6.1510 97.90 1.6687 4 437.0 21.20 396.90 18.46 17.80 +0.29090 0.00 21.890 0 0.6240 6.1740 93.60 1.6119 4 437.0 21.20 388.08 24.16 14.00 +1.62864 0.00 21.890 0 0.6240 5.0190 100.00 1.4394 4 437.0 21.20 396.90 34.41 14.40 +3.32105 0.00 19.580 1 0.8710 5.4030 100.00 1.3216 5 403.0 14.70 396.90 26.82 13.40 +4.09740 0.00 19.580 0 0.8710 5.4680 100.00 1.4118 5 403.0 14.70 396.90 26.42 15.60 +2.77974 0.00 19.580 0 0.8710 4.9030 97.80 1.3459 5 403.0 14.70 396.90 29.29 11.80 +2.37934 0.00 19.580 0 0.8710 6.1300 100.00 1.4191 5 403.0 14.70 172.91 27.80 13.80 +2.15505 0.00 19.580 0 0.8710 5.6280 100.00 1.5166 5 403.0 14.70 169.27 16.65 15.60 +2.36862 0.00 19.580 0 0.8710 4.9260 95.70 1.4608 5 403.0 14.70 391.71 29.53 14.60 +2.33099 0.00 19.580 0 0.8710 5.1860 93.80 1.5296 5 403.0 14.70 356.99 28.32 17.80 +2.73397 0.00 19.580 0 0.8710 5.5970 94.90 1.5257 5 403.0 14.70 351.85 21.45 15.40 +1.65660 0.00 19.580 0 0.8710 6.1220 97.30 1.6180 5 403.0 14.70 372.80 14.10 21.50 +1.49632 0.00 19.580 0 0.8710 5.4040 100.00 1.5916 5 403.0 14.70 341.60 13.28 19.60 +1.12658 0.00 19.580 1 0.8710 5.0120 88.00 1.6102 5 403.0 14.70 343.28 12.12 15.30 +2.14918 0.00 19.580 0 0.8710 5.7090 98.50 1.6232 5 403.0 14.70 261.95 15.79 19.40 +1.41385 0.00 19.580 1 0.8710 6.1290 96.00 1.7494 5 403.0 14.70 321.02 15.12 17.00 +3.53501 0.00 19.580 1 0.8710 6.1520 82.60 1.7455 5 403.0 14.70 88.01 15.02 15.60 +2.44668 0.00 19.580 0 0.8710 5.2720 94.00 1.7364 5 403.0 14.70 88.63 16.14 13.10 +1.22358 0.00 19.580 0 0.6050 6.9430 97.40 1.8773 5 403.0 14.70 363.43 4.59 41.30 +1.34284 0.00 19.580 0 0.6050 6.0660 100.00 1.7573 5 403.0 14.70 353.89 6.43 24.30 +1.42502 0.00 19.580 0 0.8710 6.5100 100.00 1.7659 5 403.0 14.70 364.31 7.39 23.30 +1.27346 0.00 19.580 1 0.6050 6.2500 92.60 1.7984 5 403.0 14.70 338.92 5.50 27.00 +1.46336 0.00 19.580 0 0.6050 7.4890 90.80 1.9709 5 403.0 14.70 374.43 1.73 50.00 +1.83377 0.00 19.580 1 0.6050 7.8020 98.20 2.0407 5 403.0 14.70 389.61 1.92 50.00 +1.51902 0.00 19.580 1 0.6050 8.3750 93.90 2.1620 5 403.0 14.70 388.45 3.32 50.00 +2.24236 0.00 19.580 0 0.6050 5.8540 91.80 2.4220 5 403.0 14.70 395.11 11.64 22.70 +2.92400 0.00 19.580 0 0.6050 6.1010 93.00 2.2834 5 403.0 14.70 240.16 9.81 25.00 +2.01019 0.00 19.580 0 0.6050 7.9290 96.20 2.0459 5 403.0 14.70 369.30 3.70 50.00 +1.80028 0.00 19.580 0 0.6050 5.8770 79.20 2.4259 5 403.0 14.70 227.61 12.14 23.80 +2.30040 0.00 19.580 0 0.6050 6.3190 96.10 2.1000 5 403.0 14.70 297.09 11.10 23.80 +2.44953 0.00 19.580 0 0.6050 6.4020 95.20 2.2625 5 403.0 14.70 330.04 11.32 22.30 +1.20742 0.00 19.580 0 0.6050 5.8750 94.60 2.4259 5 403.0 14.70 292.29 14.43 17.40 +2.31390 0.00 19.580 0 0.6050 5.8800 97.30 2.3887 5 403.0 14.70 348.13 12.03 19.10 +0.13914 0.00 4.050 0 0.5100 5.5720 88.50 
2.5961 5 296.0 16.60 396.90 14.69 23.10 +0.09178 0.00 4.050 0 0.5100 6.4160 84.10 2.6463 5 296.0 16.60 395.50 9.04 23.60 +0.08447 0.00 4.050 0 0.5100 5.8590 68.70 2.7019 5 296.0 16.60 393.23 9.64 22.60 +0.06664 0.00 4.050 0 0.5100 6.5460 33.10 3.1323 5 296.0 16.60 390.96 5.33 29.40 +0.07022 0.00 4.050 0 0.5100 6.0200 47.20 3.5549 5 296.0 16.60 393.23 10.11 23.20 +0.05425 0.00 4.050 0 0.5100 6.3150 73.40 3.3175 5 296.0 16.60 395.60 6.29 24.60 +0.06642 0.00 4.050 0 0.5100 6.8600 74.40 2.9153 5 296.0 16.60 391.27 6.92 29.90 +0.05780 0.00 2.460 0 0.4880 6.9800 58.40 2.8290 3 193.0 17.80 396.90 5.04 37.20 +0.06588 0.00 2.460 0 0.4880 7.7650 83.30 2.7410 3 193.0 17.80 395.56 7.56 39.80 +0.06888 0.00 2.460 0 0.4880 6.1440 62.20 2.5979 3 193.0 17.80 396.90 9.45 36.20 +0.09103 0.00 2.460 0 0.4880 7.1550 92.20 2.7006 3 193.0 17.80 394.12 4.82 37.90 +0.10008 0.00 2.460 0 0.4880 6.5630 95.60 2.8470 3 193.0 17.80 396.90 5.68 32.50 +0.08308 0.00 2.460 0 0.4880 5.6040 89.80 2.9879 3 193.0 17.80 391.00 13.98 26.40 +0.06047 0.00 2.460 0 0.4880 6.1530 68.80 3.2797 3 193.0 17.80 387.11 13.15 29.60 +0.05602 0.00 2.460 0 0.4880 7.8310 53.60 3.1992 3 193.0 17.80 392.63 4.45 50.00 +0.07875 45.00 3.440 0 0.4370 6.7820 41.10 3.7886 5 398.0 15.20 393.87 6.68 32.00 +0.12579 45.00 3.440 0 0.4370 6.5560 29.10 4.5667 5 398.0 15.20 382.84 4.56 29.80 +0.08370 45.00 3.440 0 0.4370 7.1850 38.90 4.5667 5 398.0 15.20 396.90 5.39 34.90 +0.09068 45.00 3.440 0 0.4370 6.9510 21.50 6.4798 5 398.0 15.20 377.68 5.10 37.00 +0.06911 45.00 3.440 0 0.4370 6.7390 30.80 6.4798 5 398.0 15.20 389.71 4.69 30.50 +0.08664 45.00 3.440 0 0.4370 7.1780 26.30 6.4798 5 398.0 15.20 390.49 2.87 36.40 +0.02187 60.00 2.930 0 0.4010 6.8000 9.90 6.2196 1 265.0 15.60 393.37 5.03 31.10 +0.01439 60.00 2.930 0 0.4010 6.6040 18.80 6.2196 1 265.0 15.60 376.70 4.38 29.10 +0.01381 80.00 0.460 0 0.4220 7.8750 32.00 5.6484 4 255.0 14.40 394.23 2.97 50.00 +0.04011 80.00 1.520 0 0.4040 7.2870 34.10 7.3090 2 329.0 12.60 396.90 4.08 33.30 +0.04666 80.00 1.520 0 0.4040 7.1070 36.60 7.3090 2 329.0 12.60 354.31 8.61 30.30 +0.03768 80.00 1.520 0 0.4040 7.2740 38.30 7.3090 2 329.0 12.60 392.20 6.62 34.60 +0.03150 95.00 1.470 0 0.4030 6.9750 15.30 7.6534 3 402.0 17.00 396.90 4.56 34.90 +0.01778 95.00 1.470 0 0.4030 7.1350 13.90 7.6534 3 402.0 17.00 384.30 4.45 32.90 +0.03445 82.50 2.030 0 0.4150 6.1620 38.40 6.2700 2 348.0 14.70 393.77 7.43 24.10 +0.02177 82.50 2.030 0 0.4150 7.6100 15.70 6.2700 2 348.0 14.70 395.38 3.11 42.30 +0.03510 95.00 2.680 0 0.4161 7.8530 33.20 5.1180 4 224.0 14.70 392.78 3.81 48.50 +0.02009 95.00 2.680 0 0.4161 8.0340 31.90 5.1180 4 224.0 14.70 390.55 2.88 50.00 +0.13642 0.00 10.590 0 0.4890 5.8910 22.30 3.9454 4 277.0 18.60 396.90 10.87 22.60 +0.22969 0.00 10.590 0 0.4890 6.3260 52.50 4.3549 4 277.0 18.60 394.87 10.97 24.40 +0.25199 0.00 10.590 0 0.4890 5.7830 72.70 4.3549 4 277.0 18.60 389.43 18.06 22.50 +0.13587 0.00 10.590 1 0.4890 6.0640 59.10 4.2392 4 277.0 18.60 381.32 14.66 24.40 +0.43571 0.00 10.590 1 0.4890 5.3440 100.00 3.8750 4 277.0 18.60 396.90 23.09 20.00 +0.17446 0.00 10.590 1 0.4890 5.9600 92.10 3.8771 4 277.0 18.60 393.25 17.27 21.70 +0.37578 0.00 10.590 1 0.4890 5.4040 88.60 3.6650 4 277.0 18.60 395.24 23.98 19.30 +0.21719 0.00 10.590 1 0.4890 5.8070 53.80 3.6526 4 277.0 18.60 390.94 16.03 22.40 +0.14052 0.00 10.590 0 0.4890 6.3750 32.30 3.9454 4 277.0 18.60 385.81 9.38 28.10 +0.28955 0.00 10.590 0 0.4890 5.4120 9.80 3.5875 4 277.0 18.60 348.93 29.55 23.70 +0.19802 0.00 10.590 0 0.4890 6.1820 42.40 3.9454 4 277.0 18.60 393.63 
9.47 25.00 +0.04560 0.00 13.890 1 0.5500 5.8880 56.00 3.1121 5 276.0 16.40 392.80 13.51 23.30 +0.07013 0.00 13.890 0 0.5500 6.6420 85.10 3.4211 5 276.0 16.40 392.78 9.69 28.70 +0.11069 0.00 13.890 1 0.5500 5.9510 93.80 2.8893 5 276.0 16.40 396.90 17.92 21.50 +0.11425 0.00 13.890 1 0.5500 6.3730 92.40 3.3633 5 276.0 16.40 393.74 10.50 23.00 +0.35809 0.00 6.200 1 0.5070 6.9510 88.50 2.8617 8 307.0 17.40 391.70 9.71 26.70 +0.40771 0.00 6.200 1 0.5070 6.1640 91.30 3.0480 8 307.0 17.40 395.24 21.46 21.70 +0.62356 0.00 6.200 1 0.5070 6.8790 77.70 3.2721 8 307.0 17.40 390.39 9.93 27.50 +0.61470 0.00 6.200 0 0.5070 6.6180 80.80 3.2721 8 307.0 17.40 396.90 7.60 30.10 +0.31533 0.00 6.200 0 0.5040 8.2660 78.30 2.8944 8 307.0 17.40 385.05 4.14 44.80 +0.52693 0.00 6.200 0 0.5040 8.7250 83.00 2.8944 8 307.0 17.40 382.00 4.63 50.00 +0.38214 0.00 6.200 0 0.5040 8.0400 86.50 3.2157 8 307.0 17.40 387.38 3.13 37.60 +0.41238 0.00 6.200 0 0.5040 7.1630 79.90 3.2157 8 307.0 17.40 372.08 6.36 31.60 +0.29819 0.00 6.200 0 0.5040 7.6860 17.00 3.3751 8 307.0 17.40 377.51 3.92 46.70 +0.44178 0.00 6.200 0 0.5040 6.5520 21.40 3.3751 8 307.0 17.40 380.34 3.76 31.50 +0.53700 0.00 6.200 0 0.5040 5.9810 68.10 3.6715 8 307.0 17.40 378.35 11.65 24.30 +0.46296 0.00 6.200 0 0.5040 7.4120 76.90 3.6715 8 307.0 17.40 376.14 5.25 31.70 +0.57529 0.00 6.200 0 0.5070 8.3370 73.30 3.8384 8 307.0 17.40 385.91 2.47 41.70 +0.33147 0.00 6.200 0 0.5070 8.2470 70.40 3.6519 8 307.0 17.40 378.95 3.95 48.30 +0.44791 0.00 6.200 1 0.5070 6.7260 66.50 3.6519 8 307.0 17.40 360.20 8.05 29.00 +0.33045 0.00 6.200 0 0.5070 6.0860 61.50 3.6519 8 307.0 17.40 376.75 10.88 24.00 +0.52058 0.00 6.200 1 0.5070 6.6310 76.50 4.1480 8 307.0 17.40 388.45 9.54 25.10 +0.51183 0.00 6.200 0 0.5070 7.3580 71.60 4.1480 8 307.0 17.40 390.07 4.73 31.50 +0.08244 30.00 4.930 0 0.4280 6.4810 18.50 6.1899 6 300.0 16.60 379.41 6.36 23.70 +0.09252 30.00 4.930 0 0.4280 6.6060 42.20 6.1899 6 300.0 16.60 383.78 7.37 23.30 +0.11329 30.00 4.930 0 0.4280 6.8970 54.30 6.3361 6 300.0 16.60 391.25 11.38 22.00 +0.10612 30.00 4.930 0 0.4280 6.0950 65.10 6.3361 6 300.0 16.60 394.62 12.40 20.10 +0.10290 30.00 4.930 0 0.4280 6.3580 52.90 7.0355 6 300.0 16.60 372.75 11.22 22.20 +0.12757 30.00 4.930 0 0.4280 6.3930 7.80 7.0355 6 300.0 16.60 374.71 5.19 23.70 +0.20608 22.00 5.860 0 0.4310 5.5930 76.50 7.9549 7 330.0 19.10 372.49 12.50 17.60 +0.19133 22.00 5.860 0 0.4310 5.6050 70.20 7.9549 7 330.0 19.10 389.13 18.46 18.50 +0.33983 22.00 5.860 0 0.4310 6.1080 34.90 8.0555 7 330.0 19.10 390.18 9.16 24.30 +0.19657 22.00 5.860 0 0.4310 6.2260 79.20 8.0555 7 330.0 19.10 376.14 10.15 20.50 +0.16439 22.00 5.860 0 0.4310 6.4330 49.10 7.8265 7 330.0 19.10 374.71 9.52 24.50 +0.19073 22.00 5.860 0 0.4310 6.7180 17.50 7.8265 7 330.0 19.10 393.74 6.56 26.20 +0.14030 22.00 5.860 0 0.4310 6.4870 13.00 7.3967 7 330.0 19.10 396.28 5.90 24.40 +0.21409 22.00 5.860 0 0.4310 6.4380 8.90 7.3967 7 330.0 19.10 377.07 3.59 24.80 +0.08221 22.00 5.860 0 0.4310 6.9570 6.80 8.9067 7 330.0 19.10 386.09 3.53 29.60 +0.36894 22.00 5.860 0 0.4310 8.2590 8.40 8.9067 7 330.0 19.10 396.90 3.54 42.80 +0.04819 80.00 3.640 0 0.3920 6.1080 32.00 9.2203 1 315.0 16.40 392.89 6.57 21.90 +0.03548 80.00 3.640 0 0.3920 5.8760 19.10 9.2203 1 315.0 16.40 395.18 9.25 20.90 +0.01538 90.00 3.750 0 0.3940 7.4540 34.20 6.3361 3 244.0 15.90 386.34 3.11 44.00 +0.61154 20.00 3.970 0 0.6470 8.7040 86.90 1.8010 5 264.0 13.00 389.70 5.12 50.00 +0.66351 20.00 3.970 0 0.6470 7.3330 100.00 1.8946 5 264.0 13.00 383.29 7.79 36.00 +0.65665 20.00 3.970 0 
0.6470 6.8420 100.00 2.0107 5 264.0 13.00 391.93 6.90 30.10 +0.54011 20.00 3.970 0 0.6470 7.2030 81.80 2.1121 5 264.0 13.00 392.80 9.59 33.80 +0.53412 20.00 3.970 0 0.6470 7.5200 89.40 2.1398 5 264.0 13.00 388.37 7.26 43.10 +0.52014 20.00 3.970 0 0.6470 8.3980 91.50 2.2885 5 264.0 13.00 386.86 5.91 48.80 +0.82526 20.00 3.970 0 0.6470 7.3270 94.50 2.0788 5 264.0 13.00 393.42 11.25 31.00 +0.55007 20.00 3.970 0 0.6470 7.2060 91.60 1.9301 5 264.0 13.00 387.89 8.10 36.50 +0.76162 20.00 3.970 0 0.6470 5.5600 62.80 1.9865 5 264.0 13.00 392.40 10.45 22.80 +0.78570 20.00 3.970 0 0.6470 7.0140 84.60 2.1329 5 264.0 13.00 384.07 14.79 30.70 +0.57834 20.00 3.970 0 0.5750 8.2970 67.00 2.4216 5 264.0 13.00 384.54 7.44 50.00 +0.54050 20.00 3.970 0 0.5750 7.4700 52.60 2.8720 5 264.0 13.00 390.30 3.16 43.50 +0.09065 20.00 6.960 1 0.4640 5.9200 61.50 3.9175 3 223.0 18.60 391.34 13.65 20.70 +0.29916 20.00 6.960 0 0.4640 5.8560 42.10 4.4290 3 223.0 18.60 388.65 13.00 21.10 +0.16211 20.00 6.960 0 0.4640 6.2400 16.30 4.4290 3 223.0 18.60 396.90 6.59 25.20 +0.11460 20.00 6.960 0 0.4640 6.5380 58.70 3.9175 3 223.0 18.60 394.96 7.73 24.40 +0.22188 20.00 6.960 1 0.4640 7.6910 51.80 4.3665 3 223.0 18.60 390.77 6.58 35.20 +0.05644 40.00 6.410 1 0.4470 6.7580 32.90 4.0776 4 254.0 17.60 396.90 3.53 32.40 +0.09604 40.00 6.410 0 0.4470 6.8540 42.80 4.2673 4 254.0 17.60 396.90 2.98 32.00 +0.10469 40.00 6.410 1 0.4470 7.2670 49.00 4.7872 4 254.0 17.60 389.25 6.05 33.20 +0.06127 40.00 6.410 1 0.4470 6.8260 27.60 4.8628 4 254.0 17.60 393.45 4.16 33.10 +0.07978 40.00 6.410 0 0.4470 6.4820 32.10 4.1403 4 254.0 17.60 396.90 7.19 29.10 +0.21038 20.00 3.330 0 0.4429 6.8120 32.20 4.1007 5 216.0 14.90 396.90 4.85 35.10 +0.03578 20.00 3.330 0 0.4429 7.8200 64.50 4.6947 5 216.0 14.90 387.31 3.76 45.40 +0.03705 20.00 3.330 0 0.4429 6.9680 37.20 5.2447 5 216.0 14.90 392.23 4.59 35.40 +0.06129 20.00 3.330 1 0.4429 7.6450 49.70 5.2119 5 216.0 14.90 377.07 3.01 46.00 +0.01501 90.00 1.210 1 0.4010 7.9230 24.80 5.8850 1 198.0 13.60 395.52 3.16 50.00 +0.00906 90.00 2.970 0 0.4000 7.0880 20.80 7.3073 1 285.0 15.30 394.72 7.85 32.20 +0.01096 55.00 2.250 0 0.3890 6.4530 31.90 7.3073 1 300.0 15.30 394.72 8.23 22.00 +0.01965 80.00 1.760 0 0.3850 6.2300 31.50 9.0892 1 241.0 18.20 341.60 12.93 20.10 +0.03871 52.50 5.320 0 0.4050 6.2090 31.30 7.3172 6 293.0 16.60 396.90 7.14 23.20 +0.04590 52.50 5.320 0 0.4050 6.3150 45.60 7.3172 6 293.0 16.60 396.90 7.60 22.30 +0.04297 52.50 5.320 0 0.4050 6.5650 22.90 7.3172 6 293.0 16.60 371.72 9.51 24.80 +0.03502 80.00 4.950 0 0.4110 6.8610 27.90 5.1167 4 245.0 19.20 396.90 3.33 28.50 +0.07886 80.00 4.950 0 0.4110 7.1480 27.70 5.1167 4 245.0 19.20 396.90 3.56 37.30 +0.03615 80.00 4.950 0 0.4110 6.6300 23.40 5.1167 4 245.0 19.20 396.90 4.70 27.90 +0.08265 0.00 13.920 0 0.4370 6.1270 18.40 5.5027 4 289.0 16.00 396.90 8.58 23.90 +0.08199 0.00 13.920 0 0.4370 6.0090 42.30 5.5027 4 289.0 16.00 396.90 10.40 21.70 +0.12932 0.00 13.920 0 0.4370 6.6780 31.10 5.9604 4 289.0 16.00 396.90 6.27 28.60 +0.05372 0.00 13.920 0 0.4370 6.5490 51.00 5.9604 4 289.0 16.00 392.85 7.39 27.10 +0.14103 0.00 13.920 0 0.4370 5.7900 58.00 6.3200 4 289.0 16.00 396.90 15.84 20.30 +0.06466 70.00 2.240 0 0.4000 6.3450 20.10 7.8278 5 358.0 14.80 368.24 4.97 22.50 +0.05561 70.00 2.240 0 0.4000 7.0410 10.00 7.8278 5 358.0 14.80 371.58 4.74 29.00 +0.04417 70.00 2.240 0 0.4000 6.8710 47.40 7.8278 5 358.0 14.80 390.86 6.07 24.80 +0.03537 34.00 6.090 0 0.4330 6.5900 40.40 5.4917 7 329.0 16.10 395.75 9.50 22.00 +0.09266 34.00 6.090 0 0.4330 6.4950 18.40 
5.4917 7 329.0 16.10 383.61 8.67 26.40 +0.10000 34.00 6.090 0 0.4330 6.9820 17.70 5.4917 7 329.0 16.10 390.43 4.86 33.10 +0.05515 33.00 2.180 0 0.4720 7.2360 41.10 4.0220 7 222.0 18.40 393.68 6.93 36.10 +0.05479 33.00 2.180 0 0.4720 6.6160 58.10 3.3700 7 222.0 18.40 393.36 8.93 28.40 +0.07503 33.00 2.180 0 0.4720 7.4200 71.90 3.0992 7 222.0 18.40 396.90 6.47 33.40 +0.04932 33.00 2.180 0 0.4720 6.8490 70.30 3.1827 7 222.0 18.40 396.90 7.53 28.20 +0.49298 0.00 9.900 0 0.5440 6.6350 82.50 3.3175 4 304.0 18.40 396.90 4.54 22.80 +0.34940 0.00 9.900 0 0.5440 5.9720 76.70 3.1025 4 304.0 18.40 396.24 9.97 20.30 +2.63548 0.00 9.900 0 0.5440 4.9730 37.80 2.5194 4 304.0 18.40 350.45 12.64 16.10 +0.79041 0.00 9.900 0 0.5440 6.1220 52.80 2.6403 4 304.0 18.40 396.90 5.98 22.10 +0.26169 0.00 9.900 0 0.5440 6.0230 90.40 2.8340 4 304.0 18.40 396.30 11.72 19.40 +0.26938 0.00 9.900 0 0.5440 6.2660 82.80 3.2628 4 304.0 18.40 393.39 7.90 21.60 +0.36920 0.00 9.900 0 0.5440 6.5670 87.30 3.6023 4 304.0 18.40 395.69 9.28 23.80 +0.25356 0.00 9.900 0 0.5440 5.7050 77.70 3.9450 4 304.0 18.40 396.42 11.50 16.20 +0.31827 0.00 9.900 0 0.5440 5.9140 83.20 3.9986 4 304.0 18.40 390.70 18.33 17.80 +0.24522 0.00 9.900 0 0.5440 5.7820 71.70 4.0317 4 304.0 18.40 396.90 15.94 19.80 +0.40202 0.00 9.900 0 0.5440 6.3820 67.20 3.5325 4 304.0 18.40 395.21 10.36 23.10 +0.47547 0.00 9.900 0 0.5440 6.1130 58.80 4.0019 4 304.0 18.40 396.23 12.73 21.00 +0.16760 0.00 7.380 0 0.4930 6.4260 52.30 4.5404 5 287.0 19.60 396.90 7.20 23.80 +0.18159 0.00 7.380 0 0.4930 6.3760 54.30 4.5404 5 287.0 19.60 396.90 6.87 23.10 +0.35114 0.00 7.380 0 0.4930 6.0410 49.90 4.7211 5 287.0 19.60 396.90 7.70 20.40 +0.28392 0.00 7.380 0 0.4930 5.7080 74.30 4.7211 5 287.0 19.60 391.13 11.74 18.50 +0.34109 0.00 7.380 0 0.4930 6.4150 40.10 4.7211 5 287.0 19.60 396.90 6.12 25.00 +0.19186 0.00 7.380 0 0.4930 6.4310 14.70 5.4159 5 287.0 19.60 393.68 5.08 24.60 +0.30347 0.00 7.380 0 0.4930 6.3120 28.90 5.4159 5 287.0 19.60 396.90 6.15 23.00 +0.24103 0.00 7.380 0 0.4930 6.0830 43.70 5.4159 5 287.0 19.60 396.90 12.79 22.20 +0.06617 0.00 3.240 0 0.4600 5.8680 25.80 5.2146 4 430.0 16.90 382.44 9.97 19.30 +0.06724 0.00 3.240 0 0.4600 6.3330 17.20 5.2146 4 430.0 16.90 375.21 7.34 22.60 +0.04544 0.00 3.240 0 0.4600 6.1440 32.20 5.8736 4 430.0 16.90 368.57 9.09 19.80 +0.05023 35.00 6.060 0 0.4379 5.7060 28.40 6.6407 1 304.0 16.90 394.02 12.43 17.10 +0.03466 35.00 6.060 0 0.4379 6.0310 23.30 6.6407 1 304.0 16.90 362.25 7.83 19.40 +0.05083 0.00 5.190 0 0.5150 6.3160 38.10 6.4584 5 224.0 20.20 389.71 5.68 22.20 +0.03738 0.00 5.190 0 0.5150 6.3100 38.50 6.4584 5 224.0 20.20 389.40 6.75 20.70 +0.03961 0.00 5.190 0 0.5150 6.0370 34.50 5.9853 5 224.0 20.20 396.90 8.01 21.10 +0.03427 0.00 5.190 0 0.5150 5.8690 46.30 5.2311 5 224.0 20.20 396.90 9.80 19.50 +0.03041 0.00 5.190 0 0.5150 5.8950 59.60 5.6150 5 224.0 20.20 394.81 10.56 18.50 +0.03306 0.00 5.190 0 0.5150 6.0590 37.30 4.8122 5 224.0 20.20 396.14 8.51 20.60 +0.05497 0.00 5.190 0 0.5150 5.9850 45.40 4.8122 5 224.0 20.20 396.90 9.74 19.00 +0.06151 0.00 5.190 0 0.5150 5.9680 58.50 4.8122 5 224.0 20.20 396.90 9.29 18.70 +0.01301 35.00 1.520 0 0.4420 7.2410 49.30 7.0379 1 284.0 15.50 394.74 5.49 32.70 +0.02498 0.00 1.890 0 0.5180 6.5400 59.70 6.2669 1 422.0 15.90 389.96 8.65 16.50 +0.02543 55.00 3.780 0 0.4840 6.6960 56.40 5.7321 5 370.0 17.60 396.90 7.18 23.90 +0.03049 55.00 3.780 0 0.4840 6.8740 28.10 6.4654 5 370.0 17.60 387.97 4.61 31.20 +0.03113 0.00 4.390 0 0.4420 6.0140 48.50 8.0136 3 352.0 18.80 385.64 10.53 17.50 +0.06162 
0.00 4.390 0 0.4420 5.8980 52.30 8.0136 3 352.0 18.80 364.61 12.67 17.20 +0.01870 85.00 4.150 0 0.4290 6.5160 27.70 8.5353 4 351.0 17.90 392.43 6.36 23.10 +0.01501 80.00 2.010 0 0.4350 6.6350 29.70 8.3440 4 280.0 17.00 390.94 5.99 24.50 +0.02899 40.00 1.250 0 0.4290 6.9390 34.50 8.7921 1 335.0 19.70 389.85 5.89 26.60 +0.06211 40.00 1.250 0 0.4290 6.4900 44.40 8.7921 1 335.0 19.70 396.90 5.98 22.90 +0.07950 60.00 1.690 0 0.4110 6.5790 35.90 10.7103 4 411.0 18.30 370.78 5.49 24.10 +0.07244 60.00 1.690 0 0.4110 5.8840 18.50 10.7103 4 411.0 18.30 392.33 7.79 18.60 +0.01709 90.00 2.020 0 0.4100 6.7280 36.10 12.1265 5 187.0 17.00 384.46 4.50 30.10 +0.04301 80.00 1.910 0 0.4130 5.6630 21.90 10.5857 4 334.0 22.00 382.80 8.05 18.20 +0.10659 80.00 1.910 0 0.4130 5.9360 19.50 10.5857 4 334.0 22.00 376.04 5.57 20.60 +8.98296 0.00 18.100 1 0.7700 6.2120 97.40 2.1222 24 666.0 20.20 377.73 17.60 17.80 +3.84970 0.00 18.100 1 0.7700 6.3950 91.00 2.5052 24 666.0 20.20 391.34 13.27 21.70 +5.20177 0.00 18.100 1 0.7700 6.1270 83.40 2.7227 24 666.0 20.20 395.43 11.48 22.70 +4.26131 0.00 18.100 0 0.7700 6.1120 81.30 2.5091 24 666.0 20.20 390.74 12.67 22.60 +4.54192 0.00 18.100 0 0.7700 6.3980 88.00 2.5182 24 666.0 20.20 374.56 7.79 25.00 +3.83684 0.00 18.100 0 0.7700 6.2510 91.10 2.2955 24 666.0 20.20 350.65 14.19 19.90 +3.67822 0.00 18.100 0 0.7700 5.3620 96.20 2.1036 24 666.0 20.20 380.79 10.19 20.80 +4.22239 0.00 18.100 1 0.7700 5.8030 89.00 1.9047 24 666.0 20.20 353.04 14.64 16.80 +3.47428 0.00 18.100 1 0.7180 8.7800 82.90 1.9047 24 666.0 20.20 354.55 5.29 21.90 +4.55587 0.00 18.100 0 0.7180 3.5610 87.90 1.6132 24 666.0 20.20 354.70 7.12 27.50 +3.69695 0.00 18.100 0 0.7180 4.9630 91.40 1.7523 24 666.0 20.20 316.03 14.00 21.90 +13.52220 0.00 18.100 0 0.6310 3.8630 100.00 1.5106 24 666.0 20.20 131.42 13.33 23.10 +4.89822 0.00 18.100 0 0.6310 4.9700 100.00 1.3325 24 666.0 20.20 375.52 3.26 50.00 +5.66998 0.00 18.100 1 0.6310 6.6830 96.80 1.3567 24 666.0 20.20 375.33 3.73 50.00 +6.53876 0.00 18.100 1 0.6310 7.0160 97.50 1.2024 24 666.0 20.20 392.05 2.96 50.00 +9.23230 0.00 18.100 0 0.6310 6.2160 100.00 1.1691 24 666.0 20.20 366.15 9.53 50.00 +8.26725 0.00 18.100 1 0.6680 5.8750 89.60 1.1296 24 666.0 20.20 347.88 8.88 50.00 +11.10810 0.00 18.100 0 0.6680 4.9060 100.00 1.1742 24 666.0 20.20 396.90 34.77 13.80 +18.49820 0.00 18.100 0 0.6680 4.1380 100.00 1.1370 24 666.0 20.20 396.90 37.97 13.80 +19.60910 0.00 18.100 0 0.6710 7.3130 97.90 1.3163 24 666.0 20.20 396.90 13.44 15.00 +15.28800 0.00 18.100 0 0.6710 6.6490 93.30 1.3449 24 666.0 20.20 363.02 23.24 13.90 +9.82349 0.00 18.100 0 0.6710 6.7940 98.80 1.3580 24 666.0 20.20 396.90 21.24 13.30 +23.64820 0.00 18.100 0 0.6710 6.3800 96.20 1.3861 24 666.0 20.20 396.90 23.69 13.10 +17.86670 0.00 18.100 0 0.6710 6.2230 100.00 1.3861 24 666.0 20.20 393.74 21.78 10.20 +88.97620 0.00 18.100 0 0.6710 6.9680 91.90 1.4165 24 666.0 20.20 396.90 17.21 10.40 +15.87440 0.00 18.100 0 0.6710 6.5450 99.10 1.5192 24 666.0 20.20 396.90 21.08 10.90 +9.18702 0.00 18.100 0 0.7000 5.5360 100.00 1.5804 24 666.0 20.20 396.90 23.60 11.30 +7.99248 0.00 18.100 0 0.7000 5.5200 100.00 1.5331 24 666.0 20.20 396.90 24.56 12.30 +20.08490 0.00 18.100 0 0.7000 4.3680 91.20 1.4395 24 666.0 20.20 285.83 30.63 8.80 +16.81180 0.00 18.100 0 0.7000 5.2770 98.10 1.4261 24 666.0 20.20 396.90 30.81 7.20 +24.39380 0.00 18.100 0 0.7000 4.6520 100.00 1.4672 24 666.0 20.20 396.90 28.28 10.50 +22.59710 0.00 18.100 0 0.7000 5.0000 89.50 1.5184 24 666.0 20.20 396.90 31.99 7.40 +14.33370 0.00 18.100 0 0.7000 4.8800 
100.00 1.5895 24 666.0 20.20 372.92 30.62 10.20 +8.15174 0.00 18.100 0 0.7000 5.3900 98.90 1.7281 24 666.0 20.20 396.90 20.85 11.50 +6.96215 0.00 18.100 0 0.7000 5.7130 97.00 1.9265 24 666.0 20.20 394.43 17.11 15.10 +5.29305 0.00 18.100 0 0.7000 6.0510 82.50 2.1678 24 666.0 20.20 378.38 18.76 23.20 +11.57790 0.00 18.100 0 0.7000 5.0360 97.00 1.7700 24 666.0 20.20 396.90 25.68 9.70 +8.64476 0.00 18.100 0 0.6930 6.1930 92.60 1.7912 24 666.0 20.20 396.90 15.17 13.80 +13.35980 0.00 18.100 0 0.6930 5.8870 94.70 1.7821 24 666.0 20.20 396.90 16.35 12.70 +8.71675 0.00 18.100 0 0.6930 6.4710 98.80 1.7257 24 666.0 20.20 391.98 17.12 13.10 +5.87205 0.00 18.100 0 0.6930 6.4050 96.00 1.6768 24 666.0 20.20 396.90 19.37 12.50 +7.67202 0.00 18.100 0 0.6930 5.7470 98.90 1.6334 24 666.0 20.20 393.10 19.92 8.50 +38.35180 0.00 18.100 0 0.6930 5.4530 100.00 1.4896 24 666.0 20.20 396.90 30.59 5.00 +9.91655 0.00 18.100 0 0.6930 5.8520 77.80 1.5004 24 666.0 20.20 338.16 29.97 6.30 +25.04610 0.00 18.100 0 0.6930 5.9870 100.00 1.5888 24 666.0 20.20 396.90 26.77 5.60 +14.23620 0.00 18.100 0 0.6930 6.3430 100.00 1.5741 24 666.0 20.20 396.90 20.32 7.20 +9.59571 0.00 18.100 0 0.6930 6.4040 100.00 1.6390 24 666.0 20.20 376.11 20.31 12.10 +24.80170 0.00 18.100 0 0.6930 5.3490 96.00 1.7028 24 666.0 20.20 396.90 19.77 8.30 +41.52920 0.00 18.100 0 0.6930 5.5310 85.40 1.6074 24 666.0 20.20 329.46 27.38 8.50 +67.92080 0.00 18.100 0 0.6930 5.6830 100.00 1.4254 24 666.0 20.20 384.97 22.98 5.00 +20.71620 0.00 18.100 0 0.6590 4.1380 100.00 1.1781 24 666.0 20.20 370.22 23.34 11.90 +11.95110 0.00 18.100 0 0.6590 5.6080 100.00 1.2852 24 666.0 20.20 332.09 12.13 27.90 +7.40389 0.00 18.100 0 0.5970 5.6170 97.90 1.4547 24 666.0 20.20 314.64 26.40 17.20 +14.43830 0.00 18.100 0 0.5970 6.8520 100.00 1.4655 24 666.0 20.20 179.36 19.78 27.50 +51.13580 0.00 18.100 0 0.5970 5.7570 100.00 1.4130 24 666.0 20.20 2.60 10.11 15.00 +14.05070 0.00 18.100 0 0.5970 6.6570 100.00 1.5275 24 666.0 20.20 35.05 21.22 17.20 +18.81100 0.00 18.100 0 0.5970 4.6280 100.00 1.5539 24 666.0 20.20 28.79 34.37 17.90 +28.65580 0.00 18.100 0 0.5970 5.1550 100.00 1.5894 24 666.0 20.20 210.97 20.08 16.30 +45.74610 0.00 18.100 0 0.6930 4.5190 100.00 1.6582 24 666.0 20.20 88.27 36.98 7.00 +18.08460 0.00 18.100 0 0.6790 6.4340 100.00 1.8347 24 666.0 20.20 27.25 29.05 7.20 +10.83420 0.00 18.100 0 0.6790 6.7820 90.80 1.8195 24 666.0 20.20 21.57 25.79 7.50 +25.94060 0.00 18.100 0 0.6790 5.3040 89.10 1.6475 24 666.0 20.20 127.36 26.64 10.40 +73.53410 0.00 18.100 0 0.6790 5.9570 100.00 1.8026 24 666.0 20.20 16.45 20.62 8.80 +11.81230 0.00 18.100 0 0.7180 6.8240 76.50 1.7940 24 666.0 20.20 48.45 22.74 8.40 +11.08740 0.00 18.100 0 0.7180 6.4110 100.00 1.8589 24 666.0 20.20 318.75 15.02 16.70 +7.02259 0.00 18.100 0 0.7180 6.0060 95.30 1.8746 24 666.0 20.20 319.98 15.70 14.20 +12.04820 0.00 18.100 0 0.6140 5.6480 87.60 1.9512 24 666.0 20.20 291.55 14.10 20.80 +7.05042 0.00 18.100 0 0.6140 6.1030 85.10 2.0218 24 666.0 20.20 2.52 23.29 13.40 +8.79212 0.00 18.100 0 0.5840 5.5650 70.60 2.0635 24 666.0 20.20 3.65 17.16 11.70 +15.86030 0.00 18.100 0 0.6790 5.8960 95.40 1.9096 24 666.0 20.20 7.68 24.39 8.30 +12.24720 0.00 18.100 0 0.5840 5.8370 59.70 1.9976 24 666.0 20.20 24.65 15.69 10.20 +37.66190 0.00 18.100 0 0.6790 6.2020 78.70 1.8629 24 666.0 20.20 18.82 14.52 10.90 +7.36711 0.00 18.100 0 0.6790 6.1930 78.10 1.9356 24 666.0 20.20 96.73 21.52 11.00 +9.33889 0.00 18.100 0 0.6790 6.3800 95.60 1.9682 24 666.0 20.20 60.72 24.08 9.50 +8.49213 0.00 18.100 0 0.5840 6.3480 86.10 2.0527 24 
666.0 20.20 83.45 17.64 14.50 +10.06230 0.00 18.100 0 0.5840 6.8330 94.30 2.0882 24 666.0 20.20 81.33 19.69 14.10 +6.44405 0.00 18.100 0 0.5840 6.4250 74.80 2.2004 24 666.0 20.20 97.95 12.03 16.10 +5.58107 0.00 18.100 0 0.7130 6.4360 87.90 2.3158 24 666.0 20.20 100.19 16.22 14.30 +13.91340 0.00 18.100 0 0.7130 6.2080 95.00 2.2222 24 666.0 20.20 100.63 15.17 11.70 +11.16040 0.00 18.100 0 0.7400 6.6290 94.60 2.1247 24 666.0 20.20 109.85 23.27 13.40 +14.42080 0.00 18.100 0 0.7400 6.4610 93.30 2.0026 24 666.0 20.20 27.49 18.05 9.60 +15.17720 0.00 18.100 0 0.7400 6.1520 100.00 1.9142 24 666.0 20.20 9.32 26.45 8.70 +13.67810 0.00 18.100 0 0.7400 5.9350 87.90 1.8206 24 666.0 20.20 68.95 34.02 8.40 +9.39063 0.00 18.100 0 0.7400 5.6270 93.90 1.8172 24 666.0 20.20 396.90 22.88 12.80 +22.05110 0.00 18.100 0 0.7400 5.8180 92.40 1.8662 24 666.0 20.20 391.45 22.11 10.50 +9.72418 0.00 18.100 0 0.7400 6.4060 97.20 2.0651 24 666.0 20.20 385.96 19.52 17.10 +5.66637 0.00 18.100 0 0.7400 6.2190 100.00 2.0048 24 666.0 20.20 395.69 16.59 18.40 +9.96654 0.00 18.100 0 0.7400 6.4850 100.00 1.9784 24 666.0 20.20 386.73 18.85 15.40 +12.80230 0.00 18.100 0 0.7400 5.8540 96.60 1.8956 24 666.0 20.20 240.52 23.79 10.80 +10.67180 0.00 18.100 0 0.7400 6.4590 94.80 1.9879 24 666.0 20.20 43.06 23.98 11.80 +6.28807 0.00 18.100 0 0.7400 6.3410 96.40 2.0720 24 666.0 20.20 318.01 17.79 14.90 +9.92485 0.00 18.100 0 0.7400 6.2510 96.60 2.1980 24 666.0 20.20 388.52 16.44 12.60 +9.32909 0.00 18.100 0 0.7130 6.1850 98.70 2.2616 24 666.0 20.20 396.90 18.13 14.10 +7.52601 0.00 18.100 0 0.7130 6.4170 98.30 2.1850 24 666.0 20.20 304.21 19.31 13.00 +6.71772 0.00 18.100 0 0.7130 6.7490 92.60 2.3236 24 666.0 20.20 0.32 17.44 13.40 +5.44114 0.00 18.100 0 0.7130 6.6550 98.20 2.3552 24 666.0 20.20 355.29 17.73 15.20 +5.09017 0.00 18.100 0 0.7130 6.2970 91.80 2.3682 24 666.0 20.20 385.09 17.27 16.10 +8.24809 0.00 18.100 0 0.7130 7.3930 99.30 2.4527 24 666.0 20.20 375.87 16.74 17.80 +9.51363 0.00 18.100 0 0.7130 6.7280 94.10 2.4961 24 666.0 20.20 6.68 18.71 14.90 +4.75237 0.00 18.100 0 0.7130 6.5250 86.50 2.4358 24 666.0 20.20 50.92 18.13 14.10 +4.66883 0.00 18.100 0 0.7130 5.9760 87.90 2.5806 24 666.0 20.20 10.48 19.01 12.70 +8.20058 0.00 18.100 0 0.7130 5.9360 80.30 2.7792 24 666.0 20.20 3.50 16.94 13.50 +7.75223 0.00 18.100 0 0.7130 6.3010 83.70 2.7831 24 666.0 20.20 272.21 16.23 14.90 +6.80117 0.00 18.100 0 0.7130 6.0810 84.40 2.7175 24 666.0 20.20 396.90 14.70 20.00 +4.81213 0.00 18.100 0 0.7130 6.7010 90.00 2.5975 24 666.0 20.20 255.23 16.42 16.40 +3.69311 0.00 18.100 0 0.7130 6.3760 88.40 2.5671 24 666.0 20.20 391.43 14.65 17.70 +6.65492 0.00 18.100 0 0.7130 6.3170 83.00 2.7344 24 666.0 20.20 396.90 13.99 19.50 +5.82115 0.00 18.100 0 0.7130 6.5130 89.90 2.8016 24 666.0 20.20 393.82 10.29 20.20 +7.83932 0.00 18.100 0 0.6550 6.2090 65.40 2.9634 24 666.0 20.20 396.90 13.22 21.40 +3.16360 0.00 18.100 0 0.6550 5.7590 48.20 3.0665 24 666.0 20.20 334.40 14.13 19.90 +3.77498 0.00 18.100 0 0.6550 5.9520 84.70 2.8715 24 666.0 20.20 22.01 17.15 19.00 +4.42228 0.00 18.100 0 0.5840 6.0030 94.50 2.5403 24 666.0 20.20 331.29 21.32 19.10 +15.57570 0.00 18.100 0 0.5800 5.9260 71.00 2.9084 24 666.0 20.20 368.74 18.13 19.10 +13.07510 0.00 18.100 0 0.5800 5.7130 56.70 2.8237 24 666.0 20.20 396.90 14.76 20.10 +4.34879 0.00 18.100 0 0.5800 6.1670 84.00 3.0334 24 666.0 20.20 396.90 16.29 19.90 +4.03841 0.00 18.100 0 0.5320 6.2290 90.70 3.0993 24 666.0 20.20 395.33 12.87 19.60 +3.56868 0.00 18.100 0 0.5800 6.4370 75.00 2.8965 24 666.0 20.20 393.37 14.36 23.20 
+4.64689 0.00 18.100 0 0.6140 6.9800 67.60 2.5329 24 666.0 20.20 374.68 11.66 29.80 +8.05579 0.00 18.100 0 0.5840 5.4270 95.40 2.4298 24 666.0 20.20 352.58 18.14 13.80 +6.39312 0.00 18.100 0 0.5840 6.1620 97.40 2.2060 24 666.0 20.20 302.76 24.10 13.30 +4.87141 0.00 18.100 0 0.6140 6.4840 93.60 2.3053 24 666.0 20.20 396.21 18.68 16.70 +15.02340 0.00 18.100 0 0.6140 5.3040 97.30 2.1007 24 666.0 20.20 349.48 24.91 12.00 +10.23300 0.00 18.100 0 0.6140 6.1850 96.70 2.1705 24 666.0 20.20 379.70 18.03 14.60 +14.33370 0.00 18.100 0 0.6140 6.2290 88.00 1.9512 24 666.0 20.20 383.32 13.11 21.40 +5.82401 0.00 18.100 0 0.5320 6.2420 64.70 3.4242 24 666.0 20.20 396.90 10.74 23.00 +5.70818 0.00 18.100 0 0.5320 6.7500 74.90 3.3317 24 666.0 20.20 393.07 7.74 23.70 +5.73116 0.00 18.100 0 0.5320 7.0610 77.00 3.4106 24 666.0 20.20 395.28 7.01 25.00 +2.81838 0.00 18.100 0 0.5320 5.7620 40.30 4.0983 24 666.0 20.20 392.92 10.42 21.80 +2.37857 0.00 18.100 0 0.5830 5.8710 41.90 3.7240 24 666.0 20.20 370.73 13.34 20.60 +3.67367 0.00 18.100 0 0.5830 6.3120 51.90 3.9917 24 666.0 20.20 388.62 10.58 21.20 +5.69175 0.00 18.100 0 0.5830 6.1140 79.80 3.5459 24 666.0 20.20 392.68 14.98 19.10 +4.83567 0.00 18.100 0 0.5830 5.9050 53.20 3.1523 24 666.0 20.20 388.22 11.45 20.60 +0.15086 0.00 27.740 0 0.6090 5.4540 92.70 1.8209 4 711.0 20.10 395.09 18.06 15.20 +0.18337 0.00 27.740 0 0.6090 5.4140 98.30 1.7554 4 711.0 20.10 344.05 23.97 7.00 +0.20746 0.00 27.740 0 0.6090 5.0930 98.00 1.8226 4 711.0 20.10 318.43 29.68 8.10 +0.10574 0.00 27.740 0 0.6090 5.9830 98.80 1.8681 4 711.0 20.10 390.11 18.07 13.60 +0.11132 0.00 27.740 0 0.6090 5.9830 83.50 2.1099 4 711.0 20.10 396.90 13.35 20.10 +0.17331 0.00 9.690 0 0.5850 5.7070 54.00 2.3817 6 391.0 19.20 396.90 12.01 21.80 +0.27957 0.00 9.690 0 0.5850 5.9260 42.60 2.3817 6 391.0 19.20 396.90 13.59 24.50 +0.17899 0.00 9.690 0 0.5850 5.6700 28.80 2.7986 6 391.0 19.20 393.29 17.60 23.10 +0.28960 0.00 9.690 0 0.5850 5.3900 72.90 2.7986 6 391.0 19.20 396.90 21.14 19.70 +0.26838 0.00 9.690 0 0.5850 5.7940 70.60 2.8927 6 391.0 19.20 396.90 14.10 18.30 +0.23912 0.00 9.690 0 0.5850 6.0190 65.30 2.4091 6 391.0 19.20 396.90 12.92 21.20 +0.17783 0.00 9.690 0 0.5850 5.5690 73.50 2.3999 6 391.0 19.20 395.77 15.10 17.50 +0.22438 0.00 9.690 0 0.5850 6.0270 79.70 2.4982 6 391.0 19.20 396.90 14.33 16.80 +0.06263 0.00 11.930 0 0.5730 6.5930 69.10 2.4786 1 273.0 21.00 391.99 9.67 22.40 +0.04527 0.00 11.930 0 0.5730 6.1200 76.70 2.2875 1 273.0 21.00 396.90 9.08 20.60 +0.06076 0.00 11.930 0 0.5730 6.9760 91.00 2.1675 1 273.0 21.00 396.90 5.64 23.90 +0.10959 0.00 11.930 0 0.5730 6.7940 89.30 2.3889 1 273.0 21.00 393.45 6.48 22.00 +0.04741 0.00 11.930 0 0.5730 6.0300 80.80 2.5050 1 273.0 21.00 396.90 7.88 11.90 diff --git a/e2e/datasets/iris.csv b/e2e/datasets/iris.csv new file mode 100644 index 00000000..1bf42f25 --- /dev/null +++ b/e2e/datasets/iris.csv @@ -0,0 +1,151 @@ +Id,SepalLengthCm,SepalWidthCm,PetalLengthCm,PetalWidthCm,Species +1,5.1,3.5,1.4,0.2,Iris-setosa +2,4.9,3.0,1.4,0.2,Iris-setosa +3,4.7,3.2,1.3,0.2,Iris-setosa +4,4.6,3.1,1.5,0.2,Iris-setosa +5,5.0,3.6,1.4,0.2,Iris-setosa +6,5.4,3.9,1.7,0.4,Iris-setosa +7,4.6,3.4,1.4,0.3,Iris-setosa +8,5.0,3.4,1.5,0.2,Iris-setosa +9,4.4,2.9,1.4,0.2,Iris-setosa +10,4.9,3.1,1.5,0.1,Iris-setosa +11,5.4,3.7,1.5,0.2,Iris-setosa +12,4.8,3.4,1.6,0.2,Iris-setosa +13,4.8,3.0,1.4,0.1,Iris-setosa +14,4.3,3.0,1.1,0.1,Iris-setosa +15,5.8,4.0,1.2,0.2,Iris-setosa +16,5.7,4.4,1.5,0.4,Iris-setosa +17,5.4,3.9,1.3,0.4,Iris-setosa +18,5.1,3.5,1.4,0.3,Iris-setosa 
+19,5.7,3.8,1.7,0.3,Iris-setosa +20,5.1,3.8,1.5,0.3,Iris-setosa +21,5.4,3.4,1.7,0.2,Iris-setosa +22,5.1,3.7,1.5,0.4,Iris-setosa +23,4.6,3.6,1.0,0.2,Iris-setosa +24,5.1,3.3,1.7,0.5,Iris-setosa +25,4.8,3.4,1.9,0.2,Iris-setosa +26,5.0,3.0,1.6,0.2,Iris-setosa +27,5.0,3.4,1.6,0.4,Iris-setosa +28,5.2,3.5,1.5,0.2,Iris-setosa +29,5.2,3.4,1.4,0.2,Iris-setosa +30,4.7,3.2,1.6,0.2,Iris-setosa +31,4.8,3.1,1.6,0.2,Iris-setosa +32,5.4,3.4,1.5,0.4,Iris-setosa +33,5.2,4.1,1.5,0.1,Iris-setosa +34,5.5,4.2,1.4,0.2,Iris-setosa +35,4.9,3.1,1.5,0.1,Iris-setosa +36,5.0,3.2,1.2,0.2,Iris-setosa +37,5.5,3.5,1.3,0.2,Iris-setosa +38,4.9,3.1,1.5,0.1,Iris-setosa +39,4.4,3.0,1.3,0.2,Iris-setosa +40,5.1,3.4,1.5,0.2,Iris-setosa +41,5.0,3.5,1.3,0.3,Iris-setosa +42,4.5,2.3,1.3,0.3,Iris-setosa +43,4.4,3.2,1.3,0.2,Iris-setosa +44,5.0,3.5,1.6,0.6,Iris-setosa +45,5.1,3.8,1.9,0.4,Iris-setosa +46,4.8,3.0,1.4,0.3,Iris-setosa +47,5.1,3.8,1.6,0.2,Iris-setosa +48,4.6,3.2,1.4,0.2,Iris-setosa +49,5.3,3.7,1.5,0.2,Iris-setosa +50,5.0,3.3,1.4,0.2,Iris-setosa +51,7.0,3.2,4.7,1.4,Iris-versicolor +52,6.4,3.2,4.5,1.5,Iris-versicolor +53,6.9,3.1,4.9,1.5,Iris-versicolor +54,5.5,2.3,4.0,1.3,Iris-versicolor +55,6.5,2.8,4.6,1.5,Iris-versicolor +56,5.7,2.8,4.5,1.3,Iris-versicolor +57,6.3,3.3,4.7,1.6,Iris-versicolor +58,4.9,2.4,3.3,1.0,Iris-versicolor +59,6.6,2.9,4.6,1.3,Iris-versicolor +60,5.2,2.7,3.9,1.4,Iris-versicolor +61,5.0,2.0,3.5,1.0,Iris-versicolor +62,5.9,3.0,4.2,1.5,Iris-versicolor +63,6.0,2.2,4.0,1.0,Iris-versicolor +64,6.1,2.9,4.7,1.4,Iris-versicolor +65,5.6,2.9,3.6,1.3,Iris-versicolor +66,6.7,3.1,4.4,1.4,Iris-versicolor +67,5.6,3.0,4.5,1.5,Iris-versicolor +68,5.8,2.7,4.1,1.0,Iris-versicolor +69,6.2,2.2,4.5,1.5,Iris-versicolor +70,5.6,2.5,3.9,1.1,Iris-versicolor +71,5.9,3.2,4.8,1.8,Iris-versicolor +72,6.1,2.8,4.0,1.3,Iris-versicolor +73,6.3,2.5,4.9,1.5,Iris-versicolor +74,6.1,2.8,4.7,1.2,Iris-versicolor +75,6.4,2.9,4.3,1.3,Iris-versicolor +76,6.6,3.0,4.4,1.4,Iris-versicolor +77,6.8,2.8,4.8,1.4,Iris-versicolor +78,6.7,3.0,5.0,1.7,Iris-versicolor +79,6.0,2.9,4.5,1.5,Iris-versicolor +80,5.7,2.6,3.5,1.0,Iris-versicolor +81,5.5,2.4,3.8,1.1,Iris-versicolor +82,5.5,2.4,3.7,1.0,Iris-versicolor +83,5.8,2.7,3.9,1.2,Iris-versicolor +84,6.0,2.7,5.1,1.6,Iris-versicolor +85,5.4,3.0,4.5,1.5,Iris-versicolor +86,6.0,3.4,4.5,1.6,Iris-versicolor +87,6.7,3.1,4.7,1.5,Iris-versicolor +88,6.3,2.3,4.4,1.3,Iris-versicolor +89,5.6,3.0,4.1,1.3,Iris-versicolor +90,5.5,2.5,4.0,1.3,Iris-versicolor +91,5.5,2.6,4.4,1.2,Iris-versicolor +92,6.1,3.0,4.6,1.4,Iris-versicolor +93,5.8,2.6,4.0,1.2,Iris-versicolor +94,5.0,2.3,3.3,1.0,Iris-versicolor +95,5.6,2.7,4.2,1.3,Iris-versicolor +96,5.7,3.0,4.2,1.2,Iris-versicolor +97,5.7,2.9,4.2,1.3,Iris-versicolor +98,6.2,2.9,4.3,1.3,Iris-versicolor +99,5.1,2.5,3.0,1.1,Iris-versicolor +100,5.7,2.8,4.1,1.3,Iris-versicolor +101,6.3,3.3,6.0,2.5,Iris-virginica +102,5.8,2.7,5.1,1.9,Iris-virginica +103,7.1,3.0,5.9,2.1,Iris-virginica +104,6.3,2.9,5.6,1.8,Iris-virginica +105,6.5,3.0,5.8,2.2,Iris-virginica +106,7.6,3.0,6.6,2.1,Iris-virginica +107,4.9,2.5,4.5,1.7,Iris-virginica +108,7.3,2.9,6.3,1.8,Iris-virginica +109,6.7,2.5,5.8,1.8,Iris-virginica +110,7.2,3.6,6.1,2.5,Iris-virginica +111,6.5,3.2,5.1,2.0,Iris-virginica +112,6.4,2.7,5.3,1.9,Iris-virginica +113,6.8,3.0,5.5,2.1,Iris-virginica +114,5.7,2.5,5.0,2.0,Iris-virginica +115,5.8,2.8,5.1,2.4,Iris-virginica +116,6.4,3.2,5.3,2.3,Iris-virginica +117,6.5,3.0,5.5,1.8,Iris-virginica +118,7.7,3.8,6.7,2.2,Iris-virginica +119,7.7,2.6,6.9,2.3,Iris-virginica +120,6.0,2.2,5.0,1.5,Iris-virginica 
+121,6.9,3.2,5.7,2.3,Iris-virginica +122,5.6,2.8,4.9,2.0,Iris-virginica +123,7.7,2.8,6.7,2.0,Iris-virginica +124,6.3,2.7,4.9,1.8,Iris-virginica +125,6.7,3.3,5.7,2.1,Iris-virginica +126,7.2,3.2,6.0,1.8,Iris-virginica +127,6.2,2.8,4.8,1.8,Iris-virginica +128,6.1,3.0,4.9,1.8,Iris-virginica +129,6.4,2.8,5.6,2.1,Iris-virginica +130,7.2,3.0,5.8,1.6,Iris-virginica +131,7.4,2.8,6.1,1.9,Iris-virginica +132,7.9,3.8,6.4,2.0,Iris-virginica +133,6.4,2.8,5.6,2.2,Iris-virginica +134,6.3,2.8,5.1,1.5,Iris-virginica +135,6.1,2.6,5.6,1.4,Iris-virginica +136,7.7,3.0,6.1,2.3,Iris-virginica +137,6.3,3.4,5.6,2.4,Iris-virginica +138,6.4,3.1,5.5,1.8,Iris-virginica +139,6.0,3.0,4.8,1.8,Iris-virginica +140,6.9,3.1,5.4,2.1,Iris-virginica +141,6.7,3.1,5.6,2.4,Iris-virginica +142,6.9,3.1,5.1,2.3,Iris-virginica +143,5.8,2.7,5.1,1.9,Iris-virginica +144,6.8,3.2,5.9,2.3,Iris-virginica +145,6.7,3.3,5.7,2.5,Iris-virginica +146,6.7,3.0,5.2,2.3,Iris-virginica +147,6.3,2.5,5.0,1.9,Iris-virginica +148,6.5,3.0,5.2,2.0,Iris-virginica +149,6.2,3.4,5.4,2.3,Iris-virginica +150,5.9,3.0,5.1,1.8,Iris-virginica diff --git a/e2e/datasets/pima_indians_diabetes_database.csv b/e2e/datasets/pima_indians_diabetes_database.csv new file mode 100644 index 00000000..75398d97 --- /dev/null +++ b/e2e/datasets/pima_indians_diabetes_database.csv @@ -0,0 +1,769 @@ +number of times pregnant,plasma glucose concentration a 2 hours in an oral glucose tolerance test,diastolic blood pressure (mm Hg),triceps skin fold thickness (mm),2-Hour serum insulin (mu U/ml),body mass index (weight in kg/(height in m)^2),diabetes pedigree function,age (years),class variable (0 or 1) +6 ,148 ,72 ,35 ,0 ,33.6 ,0.627 ,50 ,1 +1 ,85 ,66 ,29 ,0 ,26.6 ,0.351 ,31 ,0 +8 ,183 ,64 ,0 ,0 ,23.3 ,0.672 ,32 ,1 +1 ,89 ,66 ,23 ,94 ,28.1 ,0.167 ,21 ,0 +0 ,137 ,40 ,35 ,168 ,43.1 ,2.288 ,33 ,1 +5 ,116 ,74 ,0 ,0 ,25.6 ,0.201 ,30 ,0 +3 ,78 ,50 ,32 ,88 ,31.0 ,0.248 ,26 ,1 +10 ,115 ,0 ,0 ,0 ,35.3 ,0.134 ,29 ,0 +2 ,197 ,70 ,45 ,543 ,30.5 ,0.158 ,53 ,1 +8 ,125 ,96 ,0 ,0 ,0.0 ,0.232 ,54 ,1 +4 ,110 ,92 ,0 ,0 ,37.6 ,0.191 ,30 ,0 +10 ,168 ,74 ,0 ,0 ,38.0 ,0.537 ,34 ,1 +10 ,139 ,80 ,0 ,0 ,27.1 ,1.441 ,57 ,0 +1 ,189 ,60 ,23 ,846 ,30.1 ,0.398 ,59 ,1 +5 ,166 ,72 ,19 ,175 ,25.8 ,0.587 ,51 ,1 +7 ,100 ,0 ,0 ,0 ,30.0 ,0.484 ,32 ,1 +0 ,118 ,84 ,47 ,230 ,45.8 ,0.551 ,31 ,1 +7 ,107 ,74 ,0 ,0 ,29.6 ,0.254 ,31 ,1 +1 ,103 ,30 ,38 ,83 ,43.3 ,0.183 ,33 ,0 +1 ,115 ,70 ,30 ,96 ,34.6 ,0.529 ,32 ,1 +3 ,126 ,88 ,41 ,235 ,39.3 ,0.704 ,27 ,0 +8 ,99 ,84 ,0 ,0 ,35.4 ,0.388 ,50 ,0 +7 ,196 ,90 ,0 ,0 ,39.8 ,0.451 ,41 ,1 +9 ,119 ,80 ,35 ,0 ,29.0 ,0.263 ,29 ,1 +11 ,143 ,94 ,33 ,146 ,36.6 ,0.254 ,51 ,1 +10 ,125 ,70 ,26 ,115 ,31.1 ,0.205 ,41 ,1 +7 ,147 ,76 ,0 ,0 ,39.4 ,0.257 ,43 ,1 +1 ,97 ,66 ,15 ,140 ,23.2 ,0.487 ,22 ,0 +13 ,145 ,82 ,19 ,110 ,22.2 ,0.245 ,57 ,0 +5 ,117 ,92 ,0 ,0 ,34.1 ,0.337 ,38 ,0 +5 ,109 ,75 ,26 ,0 ,36.0 ,0.546 ,60 ,0 +3 ,158 ,76 ,36 ,245 ,31.6 ,0.851 ,28 ,1 +3 ,88 ,58 ,11 ,54 ,24.8 ,0.267 ,22 ,0 +6 ,92 ,92 ,0 ,0 ,19.9 ,0.188 ,28 ,0 +10 ,122 ,78 ,31 ,0 ,27.6 ,0.512 ,45 ,0 +4 ,103 ,60 ,33 ,192 ,24.0 ,0.966 ,33 ,0 +11 ,138 ,76 ,0 ,0 ,33.2 ,0.420 ,35 ,0 +9 ,102 ,76 ,37 ,0 ,32.9 ,0.665 ,46 ,1 +2 ,90 ,68 ,42 ,0 ,38.2 ,0.503 ,27 ,1 +4 ,111 ,72 ,47 ,207 ,37.1 ,1.390 ,56 ,1 +3 ,180 ,64 ,25 ,70 ,34.0 ,0.271 ,26 ,0 +7 ,133 ,84 ,0 ,0 ,40.2 ,0.696 ,37 ,0 +7 ,106 ,92 ,18 ,0 ,22.7 ,0.235 ,48 ,0 +9 ,171 ,110 ,24 ,240 ,45.4 ,0.721 ,54 ,1 +7 ,159 ,64 ,0 ,0 ,27.4 ,0.294 ,40 ,0 +0 ,180 ,66 ,39 ,0 ,42.0 ,1.893 ,25 ,1 +1 ,146 ,56 ,0 ,0 ,29.7 ,0.564 ,29 ,0 +2 ,71 ,70 ,27 ,0 ,28.0 ,0.586 ,22 ,0 +7 ,103 ,66 ,32 ,0 ,39.1 ,0.344 ,31 ,1 
+7 ,105 ,0 ,0 ,0 ,0.0 ,0.305 ,24 ,0 +1 ,103 ,80 ,11 ,82 ,19.4 ,0.491 ,22 ,0 +1 ,101 ,50 ,15 ,36 ,24.2 ,0.526 ,26 ,0 +5 ,88 ,66 ,21 ,23 ,24.4 ,0.342 ,30 ,0 +8 ,176 ,90 ,34 ,300 ,33.7 ,0.467 ,58 ,1 +7 ,150 ,66 ,42 ,342 ,34.7 ,0.718 ,42 ,0 +1 ,73 ,50 ,10 ,0 ,23.0 ,0.248 ,21 ,0 +7 ,187 ,68 ,39 ,304 ,37.7 ,0.254 ,41 ,1 +0 ,100 ,88 ,60 ,110 ,46.8 ,0.962 ,31 ,0 +0 ,146 ,82 ,0 ,0 ,40.5 ,1.781 ,44 ,0 +0 ,105 ,64 ,41 ,142 ,41.5 ,0.173 ,22 ,0 +2 ,84 ,0 ,0 ,0 ,0.0 ,0.304 ,21 ,0 +8 ,133 ,72 ,0 ,0 ,32.9 ,0.270 ,39 ,1 +5 ,44 ,62 ,0 ,0 ,25.0 ,0.587 ,36 ,0 +2 ,141 ,58 ,34 ,128 ,25.4 ,0.699 ,24 ,0 +7 ,114 ,66 ,0 ,0 ,32.8 ,0.258 ,42 ,1 +5 ,99 ,74 ,27 ,0 ,29.0 ,0.203 ,32 ,0 +0 ,109 ,88 ,30 ,0 ,32.5 ,0.855 ,38 ,1 +2 ,109 ,92 ,0 ,0 ,42.7 ,0.845 ,54 ,0 +1 ,95 ,66 ,13 ,38 ,19.6 ,0.334 ,25 ,0 +4 ,146 ,85 ,27 ,100 ,28.9 ,0.189 ,27 ,0 +2 ,100 ,66 ,20 ,90 ,32.9 ,0.867 ,28 ,1 +5 ,139 ,64 ,35 ,140 ,28.6 ,0.411 ,26 ,0 +13 ,126 ,90 ,0 ,0 ,43.4 ,0.583 ,42 ,1 +4 ,129 ,86 ,20 ,270 ,35.1 ,0.231 ,23 ,0 +1 ,79 ,75 ,30 ,0 ,32.0 ,0.396 ,22 ,0 +1 ,0 ,48 ,20 ,0 ,24.7 ,0.140 ,22 ,0 +7 ,62 ,78 ,0 ,0 ,32.6 ,0.391 ,41 ,0 +5 ,95 ,72 ,33 ,0 ,37.7 ,0.370 ,27 ,0 +0 ,131 ,0 ,0 ,0 ,43.2 ,0.270 ,26 ,1 +2 ,112 ,66 ,22 ,0 ,25.0 ,0.307 ,24 ,0 +3 ,113 ,44 ,13 ,0 ,22.4 ,0.140 ,22 ,0 +2 ,74 ,0 ,0 ,0 ,0.0 ,0.102 ,22 ,0 +7 ,83 ,78 ,26 ,71 ,29.3 ,0.767 ,36 ,0 +0 ,101 ,65 ,28 ,0 ,24.6 ,0.237 ,22 ,0 +5 ,137 ,108 ,0 ,0 ,48.8 ,0.227 ,37 ,1 +2 ,110 ,74 ,29 ,125 ,32.4 ,0.698 ,27 ,0 +13 ,106 ,72 ,54 ,0 ,36.6 ,0.178 ,45 ,0 +2 ,100 ,68 ,25 ,71 ,38.5 ,0.324 ,26 ,0 +15 ,136 ,70 ,32 ,110 ,37.1 ,0.153 ,43 ,1 +1 ,107 ,68 ,19 ,0 ,26.5 ,0.165 ,24 ,0 +1 ,80 ,55 ,0 ,0 ,19.1 ,0.258 ,21 ,0 +4 ,123 ,80 ,15 ,176 ,32.0 ,0.443 ,34 ,0 +7 ,81 ,78 ,40 ,48 ,46.7 ,0.261 ,42 ,0 +4 ,134 ,72 ,0 ,0 ,23.8 ,0.277 ,60 ,1 +2 ,142 ,82 ,18 ,64 ,24.7 ,0.761 ,21 ,0 +6 ,144 ,72 ,27 ,228 ,33.9 ,0.255 ,40 ,0 +2 ,92 ,62 ,28 ,0 ,31.6 ,0.130 ,24 ,0 +1 ,71 ,48 ,18 ,76 ,20.4 ,0.323 ,22 ,0 +6 ,93 ,50 ,30 ,64 ,28.7 ,0.356 ,23 ,0 +1 ,122 ,90 ,51 ,220 ,49.7 ,0.325 ,31 ,1 +1 ,163 ,72 ,0 ,0 ,39.0 ,1.222 ,33 ,1 +1 ,151 ,60 ,0 ,0 ,26.1 ,0.179 ,22 ,0 +0 ,125 ,96 ,0 ,0 ,22.5 ,0.262 ,21 ,0 +1 ,81 ,72 ,18 ,40 ,26.6 ,0.283 ,24 ,0 +2 ,85 ,65 ,0 ,0 ,39.6 ,0.930 ,27 ,0 +1 ,126 ,56 ,29 ,152 ,28.7 ,0.801 ,21 ,0 +1 ,96 ,122 ,0 ,0 ,22.4 ,0.207 ,27 ,0 +4 ,144 ,58 ,28 ,140 ,29.5 ,0.287 ,37 ,0 +3 ,83 ,58 ,31 ,18 ,34.3 ,0.336 ,25 ,0 +0 ,95 ,85 ,25 ,36 ,37.4 ,0.247 ,24 ,1 +3 ,171 ,72 ,33 ,135 ,33.3 ,0.199 ,24 ,1 +8 ,155 ,62 ,26 ,495 ,34.0 ,0.543 ,46 ,1 +1 ,89 ,76 ,34 ,37 ,31.2 ,0.192 ,23 ,0 +4 ,76 ,62 ,0 ,0 ,34.0 ,0.391 ,25 ,0 +7 ,160 ,54 ,32 ,175 ,30.5 ,0.588 ,39 ,1 +4 ,146 ,92 ,0 ,0 ,31.2 ,0.539 ,61 ,1 +5 ,124 ,74 ,0 ,0 ,34.0 ,0.220 ,38 ,1 +5 ,78 ,48 ,0 ,0 ,33.7 ,0.654 ,25 ,0 +4 ,97 ,60 ,23 ,0 ,28.2 ,0.443 ,22 ,0 +4 ,99 ,76 ,15 ,51 ,23.2 ,0.223 ,21 ,0 +0 ,162 ,76 ,56 ,100 ,53.2 ,0.759 ,25 ,1 +6 ,111 ,64 ,39 ,0 ,34.2 ,0.260 ,24 ,0 +2 ,107 ,74 ,30 ,100 ,33.6 ,0.404 ,23 ,0 +5 ,132 ,80 ,0 ,0 ,26.8 ,0.186 ,69 ,0 +0 ,113 ,76 ,0 ,0 ,33.3 ,0.278 ,23 ,1 +1 ,88 ,30 ,42 ,99 ,55.0 ,0.496 ,26 ,1 +3 ,120 ,70 ,30 ,135 ,42.9 ,0.452 ,30 ,0 +1 ,118 ,58 ,36 ,94 ,33.3 ,0.261 ,23 ,0 +1 ,117 ,88 ,24 ,145 ,34.5 ,0.403 ,40 ,1 +0 ,105 ,84 ,0 ,0 ,27.9 ,0.741 ,62 ,1 +4 ,173 ,70 ,14 ,168 ,29.7 ,0.361 ,33 ,1 +9 ,122 ,56 ,0 ,0 ,33.3 ,1.114 ,33 ,1 +3 ,170 ,64 ,37 ,225 ,34.5 ,0.356 ,30 ,1 +8 ,84 ,74 ,31 ,0 ,38.3 ,0.457 ,39 ,0 +2 ,96 ,68 ,13 ,49 ,21.1 ,0.647 ,26 ,0 +2 ,125 ,60 ,20 ,140 ,33.8 ,0.088 ,31 ,0 +0 ,100 ,70 ,26 ,50 ,30.8 ,0.597 ,21 ,0 +0 ,93 ,60 ,25 ,92 ,28.7 ,0.532 ,22 ,0 +0 ,129 ,80 ,0 ,0 ,31.2 ,0.703 ,29 ,0 +5 ,105 ,72 ,29 ,325 ,36.9 ,0.159 
,28 ,0 +3 ,128 ,78 ,0 ,0 ,21.1 ,0.268 ,55 ,0 +5 ,106 ,82 ,30 ,0 ,39.5 ,0.286 ,38 ,0 +2 ,108 ,52 ,26 ,63 ,32.5 ,0.318 ,22 ,0 +10 ,108 ,66 ,0 ,0 ,32.4 ,0.272 ,42 ,1 +4 ,154 ,62 ,31 ,284 ,32.8 ,0.237 ,23 ,0 +0 ,102 ,75 ,23 ,0 ,0.0 ,0.572 ,21 ,0 +9 ,57 ,80 ,37 ,0 ,32.8 ,0.096 ,41 ,0 +2 ,106 ,64 ,35 ,119 ,30.5 ,1.400 ,34 ,0 +5 ,147 ,78 ,0 ,0 ,33.7 ,0.218 ,65 ,0 +2 ,90 ,70 ,17 ,0 ,27.3 ,0.085 ,22 ,0 +1 ,136 ,74 ,50 ,204 ,37.4 ,0.399 ,24 ,0 +4 ,114 ,65 ,0 ,0 ,21.9 ,0.432 ,37 ,0 +9 ,156 ,86 ,28 ,155 ,34.3 ,1.189 ,42 ,1 +1 ,153 ,82 ,42 ,485 ,40.6 ,0.687 ,23 ,0 +8 ,188 ,78 ,0 ,0 ,47.9 ,0.137 ,43 ,1 +7 ,152 ,88 ,44 ,0 ,50.0 ,0.337 ,36 ,1 +2 ,99 ,52 ,15 ,94 ,24.6 ,0.637 ,21 ,0 +1 ,109 ,56 ,21 ,135 ,25.2 ,0.833 ,23 ,0 +2 ,88 ,74 ,19 ,53 ,29.0 ,0.229 ,22 ,0 +17 ,163 ,72 ,41 ,114 ,40.9 ,0.817 ,47 ,1 +4 ,151 ,90 ,38 ,0 ,29.7 ,0.294 ,36 ,0 +7 ,102 ,74 ,40 ,105 ,37.2 ,0.204 ,45 ,0 +0 ,114 ,80 ,34 ,285 ,44.2 ,0.167 ,27 ,0 +2 ,100 ,64 ,23 ,0 ,29.7 ,0.368 ,21 ,0 +0 ,131 ,88 ,0 ,0 ,31.6 ,0.743 ,32 ,1 +6 ,104 ,74 ,18 ,156 ,29.9 ,0.722 ,41 ,1 +3 ,148 ,66 ,25 ,0 ,32.5 ,0.256 ,22 ,0 +4 ,120 ,68 ,0 ,0 ,29.6 ,0.709 ,34 ,0 +4 ,110 ,66 ,0 ,0 ,31.9 ,0.471 ,29 ,0 +3 ,111 ,90 ,12 ,78 ,28.4 ,0.495 ,29 ,0 +6 ,102 ,82 ,0 ,0 ,30.8 ,0.180 ,36 ,1 +6 ,134 ,70 ,23 ,130 ,35.4 ,0.542 ,29 ,1 +2 ,87 ,0 ,23 ,0 ,28.9 ,0.773 ,25 ,0 +1 ,79 ,60 ,42 ,48 ,43.5 ,0.678 ,23 ,0 +2 ,75 ,64 ,24 ,55 ,29.7 ,0.370 ,33 ,0 +8 ,179 ,72 ,42 ,130 ,32.7 ,0.719 ,36 ,1 +6 ,85 ,78 ,0 ,0 ,31.2 ,0.382 ,42 ,0 +0 ,129 ,110 ,46 ,130 ,67.1 ,0.319 ,26 ,1 +5 ,143 ,78 ,0 ,0 ,45.0 ,0.190 ,47 ,0 +5 ,130 ,82 ,0 ,0 ,39.1 ,0.956 ,37 ,1 +6 ,87 ,80 ,0 ,0 ,23.2 ,0.084 ,32 ,0 +0 ,119 ,64 ,18 ,92 ,34.9 ,0.725 ,23 ,0 +1 ,0 ,74 ,20 ,23 ,27.7 ,0.299 ,21 ,0 +5 ,73 ,60 ,0 ,0 ,26.8 ,0.268 ,27 ,0 +4 ,141 ,74 ,0 ,0 ,27.6 ,0.244 ,40 ,0 +7 ,194 ,68 ,28 ,0 ,35.9 ,0.745 ,41 ,1 +8 ,181 ,68 ,36 ,495 ,30.1 ,0.615 ,60 ,1 +1 ,128 ,98 ,41 ,58 ,32.0 ,1.321 ,33 ,1 +8 ,109 ,76 ,39 ,114 ,27.9 ,0.640 ,31 ,1 +5 ,139 ,80 ,35 ,160 ,31.6 ,0.361 ,25 ,1 +3 ,111 ,62 ,0 ,0 ,22.6 ,0.142 ,21 ,0 +9 ,123 ,70 ,44 ,94 ,33.1 ,0.374 ,40 ,0 +7 ,159 ,66 ,0 ,0 ,30.4 ,0.383 ,36 ,1 +11 ,135 ,0 ,0 ,0 ,52.3 ,0.578 ,40 ,1 +8 ,85 ,55 ,20 ,0 ,24.4 ,0.136 ,42 ,0 +5 ,158 ,84 ,41 ,210 ,39.4 ,0.395 ,29 ,1 +1 ,105 ,58 ,0 ,0 ,24.3 ,0.187 ,21 ,0 +3 ,107 ,62 ,13 ,48 ,22.9 ,0.678 ,23 ,1 +4 ,109 ,64 ,44 ,99 ,34.8 ,0.905 ,26 ,1 +4 ,148 ,60 ,27 ,318 ,30.9 ,0.150 ,29 ,1 +0 ,113 ,80 ,16 ,0 ,31.0 ,0.874 ,21 ,0 +1 ,138 ,82 ,0 ,0 ,40.1 ,0.236 ,28 ,0 +0 ,108 ,68 ,20 ,0 ,27.3 ,0.787 ,32 ,0 +2 ,99 ,70 ,16 ,44 ,20.4 ,0.235 ,27 ,0 +6 ,103 ,72 ,32 ,190 ,37.7 ,0.324 ,55 ,0 +5 ,111 ,72 ,28 ,0 ,23.9 ,0.407 ,27 ,0 +8 ,196 ,76 ,29 ,280 ,37.5 ,0.605 ,57 ,1 +5 ,162 ,104 ,0 ,0 ,37.7 ,0.151 ,52 ,1 +1 ,96 ,64 ,27 ,87 ,33.2 ,0.289 ,21 ,0 +7 ,184 ,84 ,33 ,0 ,35.5 ,0.355 ,41 ,1 +2 ,81 ,60 ,22 ,0 ,27.7 ,0.290 ,25 ,0 +0 ,147 ,85 ,54 ,0 ,42.8 ,0.375 ,24 ,0 +7 ,179 ,95 ,31 ,0 ,34.2 ,0.164 ,60 ,0 +0 ,140 ,65 ,26 ,130 ,42.6 ,0.431 ,24 ,1 +9 ,112 ,82 ,32 ,175 ,34.2 ,0.260 ,36 ,1 +12 ,151 ,70 ,40 ,271 ,41.8 ,0.742 ,38 ,1 +5 ,109 ,62 ,41 ,129 ,35.8 ,0.514 ,25 ,1 +6 ,125 ,68 ,30 ,120 ,30.0 ,0.464 ,32 ,0 +5 ,85 ,74 ,22 ,0 ,29.0 ,1.224 ,32 ,1 +5 ,112 ,66 ,0 ,0 ,37.8 ,0.261 ,41 ,1 +0 ,177 ,60 ,29 ,478 ,34.6 ,1.072 ,21 ,1 +2 ,158 ,90 ,0 ,0 ,31.6 ,0.805 ,66 ,1 +7 ,119 ,0 ,0 ,0 ,25.2 ,0.209 ,37 ,0 +7 ,142 ,60 ,33 ,190 ,28.8 ,0.687 ,61 ,0 +1 ,100 ,66 ,15 ,56 ,23.6 ,0.666 ,26 ,0 +1 ,87 ,78 ,27 ,32 ,34.6 ,0.101 ,22 ,0 +0 ,101 ,76 ,0 ,0 ,35.7 ,0.198 ,26 ,0 +3 ,162 ,52 ,38 ,0 ,37.2 ,0.652 ,24 ,1 +4 ,197 ,70 ,39 ,744 ,36.7 ,2.329 ,31 ,0 +0 ,117 ,80 ,31 ,53 ,45.2 ,0.089 ,24 ,0 +4 
,142 ,86 ,0 ,0 ,44.0 ,0.645 ,22 ,1 +6 ,134 ,80 ,37 ,370 ,46.2 ,0.238 ,46 ,1 +1 ,79 ,80 ,25 ,37 ,25.4 ,0.583 ,22 ,0 +4 ,122 ,68 ,0 ,0 ,35.0 ,0.394 ,29 ,0 +3 ,74 ,68 ,28 ,45 ,29.7 ,0.293 ,23 ,0 +4 ,171 ,72 ,0 ,0 ,43.6 ,0.479 ,26 ,1 +7 ,181 ,84 ,21 ,192 ,35.9 ,0.586 ,51 ,1 +0 ,179 ,90 ,27 ,0 ,44.1 ,0.686 ,23 ,1 +9 ,164 ,84 ,21 ,0 ,30.8 ,0.831 ,32 ,1 +0 ,104 ,76 ,0 ,0 ,18.4 ,0.582 ,27 ,0 +1 ,91 ,64 ,24 ,0 ,29.2 ,0.192 ,21 ,0 +4 ,91 ,70 ,32 ,88 ,33.1 ,0.446 ,22 ,0 +3 ,139 ,54 ,0 ,0 ,25.6 ,0.402 ,22 ,1 +6 ,119 ,50 ,22 ,176 ,27.1 ,1.318 ,33 ,1 +2 ,146 ,76 ,35 ,194 ,38.2 ,0.329 ,29 ,0 +9 ,184 ,85 ,15 ,0 ,30.0 ,1.213 ,49 ,1 +10 ,122 ,68 ,0 ,0 ,31.2 ,0.258 ,41 ,0 +0 ,165 ,90 ,33 ,680 ,52.3 ,0.427 ,23 ,0 +9 ,124 ,70 ,33 ,402 ,35.4 ,0.282 ,34 ,0 +1 ,111 ,86 ,19 ,0 ,30.1 ,0.143 ,23 ,0 +9 ,106 ,52 ,0 ,0 ,31.2 ,0.380 ,42 ,0 +2 ,129 ,84 ,0 ,0 ,28.0 ,0.284 ,27 ,0 +2 ,90 ,80 ,14 ,55 ,24.4 ,0.249 ,24 ,0 +0 ,86 ,68 ,32 ,0 ,35.8 ,0.238 ,25 ,0 +12 ,92 ,62 ,7 ,258 ,27.6 ,0.926 ,44 ,1 +1 ,113 ,64 ,35 ,0 ,33.6 ,0.543 ,21 ,1 +3 ,111 ,56 ,39 ,0 ,30.1 ,0.557 ,30 ,0 +2 ,114 ,68 ,22 ,0 ,28.7 ,0.092 ,25 ,0 +1 ,193 ,50 ,16 ,375 ,25.9 ,0.655 ,24 ,0 +11 ,155 ,76 ,28 ,150 ,33.3 ,1.353 ,51 ,1 +3 ,191 ,68 ,15 ,130 ,30.9 ,0.299 ,34 ,0 +3 ,141 ,0 ,0 ,0 ,30.0 ,0.761 ,27 ,1 +4 ,95 ,70 ,32 ,0 ,32.1 ,0.612 ,24 ,0 +3 ,142 ,80 ,15 ,0 ,32.4 ,0.200 ,63 ,0 +4 ,123 ,62 ,0 ,0 ,32.0 ,0.226 ,35 ,1 +5 ,96 ,74 ,18 ,67 ,33.6 ,0.997 ,43 ,0 +0 ,138 ,0 ,0 ,0 ,36.3 ,0.933 ,25 ,1 +2 ,128 ,64 ,42 ,0 ,40.0 ,1.101 ,24 ,0 +0 ,102 ,52 ,0 ,0 ,25.1 ,0.078 ,21 ,0 +2 ,146 ,0 ,0 ,0 ,27.5 ,0.240 ,28 ,1 +10 ,101 ,86 ,37 ,0 ,45.6 ,1.136 ,38 ,1 +2 ,108 ,62 ,32 ,56 ,25.2 ,0.128 ,21 ,0 +3 ,122 ,78 ,0 ,0 ,23.0 ,0.254 ,40 ,0 +1 ,71 ,78 ,50 ,45 ,33.2 ,0.422 ,21 ,0 +13 ,106 ,70 ,0 ,0 ,34.2 ,0.251 ,52 ,0 +2 ,100 ,70 ,52 ,57 ,40.5 ,0.677 ,25 ,0 +7 ,106 ,60 ,24 ,0 ,26.5 ,0.296 ,29 ,1 +0 ,104 ,64 ,23 ,116 ,27.8 ,0.454 ,23 ,0 +5 ,114 ,74 ,0 ,0 ,24.9 ,0.744 ,57 ,0 +2 ,108 ,62 ,10 ,278 ,25.3 ,0.881 ,22 ,0 +0 ,146 ,70 ,0 ,0 ,37.9 ,0.334 ,28 ,1 +10 ,129 ,76 ,28 ,122 ,35.9 ,0.280 ,39 ,0 +7 ,133 ,88 ,15 ,155 ,32.4 ,0.262 ,37 ,0 +7 ,161 ,86 ,0 ,0 ,30.4 ,0.165 ,47 ,1 +2 ,108 ,80 ,0 ,0 ,27.0 ,0.259 ,52 ,1 +7 ,136 ,74 ,26 ,135 ,26.0 ,0.647 ,51 ,0 +5 ,155 ,84 ,44 ,545 ,38.7 ,0.619 ,34 ,0 +1 ,119 ,86 ,39 ,220 ,45.6 ,0.808 ,29 ,1 +4 ,96 ,56 ,17 ,49 ,20.8 ,0.340 ,26 ,0 +5 ,108 ,72 ,43 ,75 ,36.1 ,0.263 ,33 ,0 +0 ,78 ,88 ,29 ,40 ,36.9 ,0.434 ,21 ,0 +0 ,107 ,62 ,30 ,74 ,36.6 ,0.757 ,25 ,1 +2 ,128 ,78 ,37 ,182 ,43.3 ,1.224 ,31 ,1 +1 ,128 ,48 ,45 ,194 ,40.5 ,0.613 ,24 ,1 +0 ,161 ,50 ,0 ,0 ,21.9 ,0.254 ,65 ,0 +6 ,151 ,62 ,31 ,120 ,35.5 ,0.692 ,28 ,0 +2 ,146 ,70 ,38 ,360 ,28.0 ,0.337 ,29 ,1 +0 ,126 ,84 ,29 ,215 ,30.7 ,0.520 ,24 ,0 +14 ,100 ,78 ,25 ,184 ,36.6 ,0.412 ,46 ,1 +8 ,112 ,72 ,0 ,0 ,23.6 ,0.840 ,58 ,0 +0 ,167 ,0 ,0 ,0 ,32.3 ,0.839 ,30 ,1 +2 ,144 ,58 ,33 ,135 ,31.6 ,0.422 ,25 ,1 +5 ,77 ,82 ,41 ,42 ,35.8 ,0.156 ,35 ,0 +5 ,115 ,98 ,0 ,0 ,52.9 ,0.209 ,28 ,1 +3 ,150 ,76 ,0 ,0 ,21.0 ,0.207 ,37 ,0 +2 ,120 ,76 ,37 ,105 ,39.7 ,0.215 ,29 ,0 +10 ,161 ,68 ,23 ,132 ,25.5 ,0.326 ,47 ,1 +0 ,137 ,68 ,14 ,148 ,24.8 ,0.143 ,21 ,0 +0 ,128 ,68 ,19 ,180 ,30.5 ,1.391 ,25 ,1 +2 ,124 ,68 ,28 ,205 ,32.9 ,0.875 ,30 ,1 +6 ,80 ,66 ,30 ,0 ,26.2 ,0.313 ,41 ,0 +0 ,106 ,70 ,37 ,148 ,39.4 ,0.605 ,22 ,0 +2 ,155 ,74 ,17 ,96 ,26.6 ,0.433 ,27 ,1 +3 ,113 ,50 ,10 ,85 ,29.5 ,0.626 ,25 ,0 +7 ,109 ,80 ,31 ,0 ,35.9 ,1.127 ,43 ,1 +2 ,112 ,68 ,22 ,94 ,34.1 ,0.315 ,26 ,0 +3 ,99 ,80 ,11 ,64 ,19.3 ,0.284 ,30 ,0 +3 ,182 ,74 ,0 ,0 ,30.5 ,0.345 ,29 ,1 +3 ,115 ,66 ,39 ,140 ,38.1 ,0.150 ,28 ,0 +6 ,194 ,78 ,0 ,0 ,23.5 ,0.129 ,59 ,1 +4 
,129 ,60 ,12 ,231 ,27.5 ,0.527 ,31 ,0 +3 ,112 ,74 ,30 ,0 ,31.6 ,0.197 ,25 ,1 +0 ,124 ,70 ,20 ,0 ,27.4 ,0.254 ,36 ,1 +13 ,152 ,90 ,33 ,29 ,26.8 ,0.731 ,43 ,1 +2 ,112 ,75 ,32 ,0 ,35.7 ,0.148 ,21 ,0 +1 ,157 ,72 ,21 ,168 ,25.6 ,0.123 ,24 ,0 +1 ,122 ,64 ,32 ,156 ,35.1 ,0.692 ,30 ,1 +10 ,179 ,70 ,0 ,0 ,35.1 ,0.200 ,37 ,0 +2 ,102 ,86 ,36 ,120 ,45.5 ,0.127 ,23 ,1 +6 ,105 ,70 ,32 ,68 ,30.8 ,0.122 ,37 ,0 +8 ,118 ,72 ,19 ,0 ,23.1 ,1.476 ,46 ,0 +2 ,87 ,58 ,16 ,52 ,32.7 ,0.166 ,25 ,0 +1 ,180 ,0 ,0 ,0 ,43.3 ,0.282 ,41 ,1 +12 ,106 ,80 ,0 ,0 ,23.6 ,0.137 ,44 ,0 +1 ,95 ,60 ,18 ,58 ,23.9 ,0.260 ,22 ,0 +0 ,165 ,76 ,43 ,255 ,47.9 ,0.259 ,26 ,0 +0 ,117 ,0 ,0 ,0 ,33.8 ,0.932 ,44 ,0 +5 ,115 ,76 ,0 ,0 ,31.2 ,0.343 ,44 ,1 +9 ,152 ,78 ,34 ,171 ,34.2 ,0.893 ,33 ,1 +7 ,178 ,84 ,0 ,0 ,39.9 ,0.331 ,41 ,1 +1 ,130 ,70 ,13 ,105 ,25.9 ,0.472 ,22 ,0 +1 ,95 ,74 ,21 ,73 ,25.9 ,0.673 ,36 ,0 +1 ,0 ,68 ,35 ,0 ,32.0 ,0.389 ,22 ,0 +5 ,122 ,86 ,0 ,0 ,34.7 ,0.290 ,33 ,0 +8 ,95 ,72 ,0 ,0 ,36.8 ,0.485 ,57 ,0 +8 ,126 ,88 ,36 ,108 ,38.5 ,0.349 ,49 ,0 +1 ,139 ,46 ,19 ,83 ,28.7 ,0.654 ,22 ,0 +3 ,116 ,0 ,0 ,0 ,23.5 ,0.187 ,23 ,0 +3 ,99 ,62 ,19 ,74 ,21.8 ,0.279 ,26 ,0 +5 ,0 ,80 ,32 ,0 ,41.0 ,0.346 ,37 ,1 +4 ,92 ,80 ,0 ,0 ,42.2 ,0.237 ,29 ,0 +4 ,137 ,84 ,0 ,0 ,31.2 ,0.252 ,30 ,0 +3 ,61 ,82 ,28 ,0 ,34.4 ,0.243 ,46 ,0 +1 ,90 ,62 ,12 ,43 ,27.2 ,0.580 ,24 ,0 +3 ,90 ,78 ,0 ,0 ,42.7 ,0.559 ,21 ,0 +9 ,165 ,88 ,0 ,0 ,30.4 ,0.302 ,49 ,1 +1 ,125 ,50 ,40 ,167 ,33.3 ,0.962 ,28 ,1 +13 ,129 ,0 ,30 ,0 ,39.9 ,0.569 ,44 ,1 +12 ,88 ,74 ,40 ,54 ,35.3 ,0.378 ,48 ,0 +1 ,196 ,76 ,36 ,249 ,36.5 ,0.875 ,29 ,1 +5 ,189 ,64 ,33 ,325 ,31.2 ,0.583 ,29 ,1 +5 ,158 ,70 ,0 ,0 ,29.8 ,0.207 ,63 ,0 +5 ,103 ,108 ,37 ,0 ,39.2 ,0.305 ,65 ,0 +4 ,146 ,78 ,0 ,0 ,38.5 ,0.520 ,67 ,1 +4 ,147 ,74 ,25 ,293 ,34.9 ,0.385 ,30 ,0 +5 ,99 ,54 ,28 ,83 ,34.0 ,0.499 ,30 ,0 +6 ,124 ,72 ,0 ,0 ,27.6 ,0.368 ,29 ,1 +0 ,101 ,64 ,17 ,0 ,21.0 ,0.252 ,21 ,0 +3 ,81 ,86 ,16 ,66 ,27.5 ,0.306 ,22 ,0 +1 ,133 ,102 ,28 ,140 ,32.8 ,0.234 ,45 ,1 +3 ,173 ,82 ,48 ,465 ,38.4 ,2.137 ,25 ,1 +0 ,118 ,64 ,23 ,89 ,0.0 ,1.731 ,21 ,0 +0 ,84 ,64 ,22 ,66 ,35.8 ,0.545 ,21 ,0 +2 ,105 ,58 ,40 ,94 ,34.9 ,0.225 ,25 ,0 +2 ,122 ,52 ,43 ,158 ,36.2 ,0.816 ,28 ,0 +12 ,140 ,82 ,43 ,325 ,39.2 ,0.528 ,58 ,1 +0 ,98 ,82 ,15 ,84 ,25.2 ,0.299 ,22 ,0 +1 ,87 ,60 ,37 ,75 ,37.2 ,0.509 ,22 ,0 +4 ,156 ,75 ,0 ,0 ,48.3 ,0.238 ,32 ,1 +0 ,93 ,100 ,39 ,72 ,43.4 ,1.021 ,35 ,0 +1 ,107 ,72 ,30 ,82 ,30.8 ,0.821 ,24 ,0 +0 ,105 ,68 ,22 ,0 ,20.0 ,0.236 ,22 ,0 +1 ,109 ,60 ,8 ,182 ,25.4 ,0.947 ,21 ,0 +1 ,90 ,62 ,18 ,59 ,25.1 ,1.268 ,25 ,0 +1 ,125 ,70 ,24 ,110 ,24.3 ,0.221 ,25 ,0 +1 ,119 ,54 ,13 ,50 ,22.3 ,0.205 ,24 ,0 +5 ,116 ,74 ,29 ,0 ,32.3 ,0.660 ,35 ,1 +8 ,105 ,100 ,36 ,0 ,43.3 ,0.239 ,45 ,1 +5 ,144 ,82 ,26 ,285 ,32.0 ,0.452 ,58 ,1 +3 ,100 ,68 ,23 ,81 ,31.6 ,0.949 ,28 ,0 +1 ,100 ,66 ,29 ,196 ,32.0 ,0.444 ,42 ,0 +5 ,166 ,76 ,0 ,0 ,45.7 ,0.340 ,27 ,1 +1 ,131 ,64 ,14 ,415 ,23.7 ,0.389 ,21 ,0 +4 ,116 ,72 ,12 ,87 ,22.1 ,0.463 ,37 ,0 +4 ,158 ,78 ,0 ,0 ,32.9 ,0.803 ,31 ,1 +2 ,127 ,58 ,24 ,275 ,27.7 ,1.600 ,25 ,0 +3 ,96 ,56 ,34 ,115 ,24.7 ,0.944 ,39 ,0 +0 ,131 ,66 ,40 ,0 ,34.3 ,0.196 ,22 ,1 +3 ,82 ,70 ,0 ,0 ,21.1 ,0.389 ,25 ,0 +3 ,193 ,70 ,31 ,0 ,34.9 ,0.241 ,25 ,1 +4 ,95 ,64 ,0 ,0 ,32.0 ,0.161 ,31 ,1 +6 ,137 ,61 ,0 ,0 ,24.2 ,0.151 ,55 ,0 +5 ,136 ,84 ,41 ,88 ,35.0 ,0.286 ,35 ,1 +9 ,72 ,78 ,25 ,0 ,31.6 ,0.280 ,38 ,0 +5 ,168 ,64 ,0 ,0 ,32.9 ,0.135 ,41 ,1 +2 ,123 ,48 ,32 ,165 ,42.1 ,0.520 ,26 ,0 +4 ,115 ,72 ,0 ,0 ,28.9 ,0.376 ,46 ,1 +0 ,101 ,62 ,0 ,0 ,21.9 ,0.336 ,25 ,0 +8 ,197 ,74 ,0 ,0 ,25.9 ,1.191 ,39 ,1 +1 ,172 ,68 ,49 ,579 ,42.4 ,0.702 ,28 ,1 +6 ,102 ,90 ,39 ,0 
,35.7 ,0.674 ,28 ,0 +1 ,112 ,72 ,30 ,176 ,34.4 ,0.528 ,25 ,0 +1 ,143 ,84 ,23 ,310 ,42.4 ,1.076 ,22 ,0 +1 ,143 ,74 ,22 ,61 ,26.2 ,0.256 ,21 ,0 +0 ,138 ,60 ,35 ,167 ,34.6 ,0.534 ,21 ,1 +3 ,173 ,84 ,33 ,474 ,35.7 ,0.258 ,22 ,1 +1 ,97 ,68 ,21 ,0 ,27.2 ,1.095 ,22 ,0 +4 ,144 ,82 ,32 ,0 ,38.5 ,0.554 ,37 ,1 +1 ,83 ,68 ,0 ,0 ,18.2 ,0.624 ,27 ,0 +3 ,129 ,64 ,29 ,115 ,26.4 ,0.219 ,28 ,1 +1 ,119 ,88 ,41 ,170 ,45.3 ,0.507 ,26 ,0 +2 ,94 ,68 ,18 ,76 ,26.0 ,0.561 ,21 ,0 +0 ,102 ,64 ,46 ,78 ,40.6 ,0.496 ,21 ,0 +2 ,115 ,64 ,22 ,0 ,30.8 ,0.421 ,21 ,0 +8 ,151 ,78 ,32 ,210 ,42.9 ,0.516 ,36 ,1 +4 ,184 ,78 ,39 ,277 ,37.0 ,0.264 ,31 ,1 +0 ,94 ,0 ,0 ,0 ,0.0 ,0.256 ,25 ,0 +1 ,181 ,64 ,30 ,180 ,34.1 ,0.328 ,38 ,1 +0 ,135 ,94 ,46 ,145 ,40.6 ,0.284 ,26 ,0 +1 ,95 ,82 ,25 ,180 ,35.0 ,0.233 ,43 ,1 +2 ,99 ,0 ,0 ,0 ,22.2 ,0.108 ,23 ,0 +3 ,89 ,74 ,16 ,85 ,30.4 ,0.551 ,38 ,0 +1 ,80 ,74 ,11 ,60 ,30.0 ,0.527 ,22 ,0 +2 ,139 ,75 ,0 ,0 ,25.6 ,0.167 ,29 ,0 +1 ,90 ,68 ,8 ,0 ,24.5 ,1.138 ,36 ,0 +0 ,141 ,0 ,0 ,0 ,42.4 ,0.205 ,29 ,1 +12 ,140 ,85 ,33 ,0 ,37.4 ,0.244 ,41 ,0 +5 ,147 ,75 ,0 ,0 ,29.9 ,0.434 ,28 ,0 +1 ,97 ,70 ,15 ,0 ,18.2 ,0.147 ,21 ,0 +6 ,107 ,88 ,0 ,0 ,36.8 ,0.727 ,31 ,0 +0 ,189 ,104 ,25 ,0 ,34.3 ,0.435 ,41 ,1 +2 ,83 ,66 ,23 ,50 ,32.2 ,0.497 ,22 ,0 +4 ,117 ,64 ,27 ,120 ,33.2 ,0.230 ,24 ,0 +8 ,108 ,70 ,0 ,0 ,30.5 ,0.955 ,33 ,1 +4 ,117 ,62 ,12 ,0 ,29.7 ,0.380 ,30 ,1 +0 ,180 ,78 ,63 ,14 ,59.4 ,2.420 ,25 ,1 +1 ,100 ,72 ,12 ,70 ,25.3 ,0.658 ,28 ,0 +0 ,95 ,80 ,45 ,92 ,36.5 ,0.330 ,26 ,0 +0 ,104 ,64 ,37 ,64 ,33.6 ,0.510 ,22 ,1 +0 ,120 ,74 ,18 ,63 ,30.5 ,0.285 ,26 ,0 +1 ,82 ,64 ,13 ,95 ,21.2 ,0.415 ,23 ,0 +2 ,134 ,70 ,0 ,0 ,28.9 ,0.542 ,23 ,1 +0 ,91 ,68 ,32 ,210 ,39.9 ,0.381 ,25 ,0 +2 ,119 ,0 ,0 ,0 ,19.6 ,0.832 ,72 ,0 +2 ,100 ,54 ,28 ,105 ,37.8 ,0.498 ,24 ,0 +14 ,175 ,62 ,30 ,0 ,33.6 ,0.212 ,38 ,1 +1 ,135 ,54 ,0 ,0 ,26.7 ,0.687 ,62 ,0 +5 ,86 ,68 ,28 ,71 ,30.2 ,0.364 ,24 ,0 +10 ,148 ,84 ,48 ,237 ,37.6 ,1.001 ,51 ,1 +9 ,134 ,74 ,33 ,60 ,25.9 ,0.460 ,81 ,0 +9 ,120 ,72 ,22 ,56 ,20.8 ,0.733 ,48 ,0 +1 ,71 ,62 ,0 ,0 ,21.8 ,0.416 ,26 ,0 +8 ,74 ,70 ,40 ,49 ,35.3 ,0.705 ,39 ,0 +5 ,88 ,78 ,30 ,0 ,27.6 ,0.258 ,37 ,0 +10 ,115 ,98 ,0 ,0 ,24.0 ,1.022 ,34 ,0 +0 ,124 ,56 ,13 ,105 ,21.8 ,0.452 ,21 ,0 +0 ,74 ,52 ,10 ,36 ,27.8 ,0.269 ,22 ,0 +0 ,97 ,64 ,36 ,100 ,36.8 ,0.600 ,25 ,0 +8 ,120 ,0 ,0 ,0 ,30.0 ,0.183 ,38 ,1 +6 ,154 ,78 ,41 ,140 ,46.1 ,0.571 ,27 ,0 +1 ,144 ,82 ,40 ,0 ,41.3 ,0.607 ,28 ,0 +0 ,137 ,70 ,38 ,0 ,33.2 ,0.170 ,22 ,0 +0 ,119 ,66 ,27 ,0 ,38.8 ,0.259 ,22 ,0 +7 ,136 ,90 ,0 ,0 ,29.9 ,0.210 ,50 ,0 +4 ,114 ,64 ,0 ,0 ,28.9 ,0.126 ,24 ,0 +0 ,137 ,84 ,27 ,0 ,27.3 ,0.231 ,59 ,0 +2 ,105 ,80 ,45 ,191 ,33.7 ,0.711 ,29 ,1 +7 ,114 ,76 ,17 ,110 ,23.8 ,0.466 ,31 ,0 +8 ,126 ,74 ,38 ,75 ,25.9 ,0.162 ,39 ,0 +4 ,132 ,86 ,31 ,0 ,28.0 ,0.419 ,63 ,0 +3 ,158 ,70 ,30 ,328 ,35.5 ,0.344 ,35 ,1 +0 ,123 ,88 ,37 ,0 ,35.2 ,0.197 ,29 ,0 +4 ,85 ,58 ,22 ,49 ,27.8 ,0.306 ,28 ,0 +0 ,84 ,82 ,31 ,125 ,38.2 ,0.233 ,23 ,0 +0 ,145 ,0 ,0 ,0 ,44.2 ,0.630 ,31 ,1 +0 ,135 ,68 ,42 ,250 ,42.3 ,0.365 ,24 ,1 +1 ,139 ,62 ,41 ,480 ,40.7 ,0.536 ,21 ,0 +0 ,173 ,78 ,32 ,265 ,46.5 ,1.159 ,58 ,0 +4 ,99 ,72 ,17 ,0 ,25.6 ,0.294 ,28 ,0 +8 ,194 ,80 ,0 ,0 ,26.1 ,0.551 ,67 ,0 +2 ,83 ,65 ,28 ,66 ,36.8 ,0.629 ,24 ,0 +2 ,89 ,90 ,30 ,0 ,33.5 ,0.292 ,42 ,0 +4 ,99 ,68 ,38 ,0 ,32.8 ,0.145 ,33 ,0 +4 ,125 ,70 ,18 ,122 ,28.9 ,1.144 ,45 ,1 +3 ,80 ,0 ,0 ,0 ,0.0 ,0.174 ,22 ,0 +6 ,166 ,74 ,0 ,0 ,26.6 ,0.304 ,66 ,0 +5 ,110 ,68 ,0 ,0 ,26.0 ,0.292 ,30 ,0 +2 ,81 ,72 ,15 ,76 ,30.1 ,0.547 ,25 ,0 +7 ,195 ,70 ,33 ,145 ,25.1 ,0.163 ,55 ,1 +6 ,154 ,74 ,32 ,193 ,29.3 ,0.839 ,39 ,0 +2 ,117 ,90 ,19 ,71 ,25.2 ,0.313 ,21 ,0 
+3 ,84 ,72 ,32 ,0 ,37.2 ,0.267 ,28 ,0 +6 ,0 ,68 ,41 ,0 ,39.0 ,0.727 ,41 ,1 +7 ,94 ,64 ,25 ,79 ,33.3 ,0.738 ,41 ,0 +3 ,96 ,78 ,39 ,0 ,37.3 ,0.238 ,40 ,0 +10 ,75 ,82 ,0 ,0 ,33.3 ,0.263 ,38 ,0 +0 ,180 ,90 ,26 ,90 ,36.5 ,0.314 ,35 ,1 +1 ,130 ,60 ,23 ,170 ,28.6 ,0.692 ,21 ,0 +2 ,84 ,50 ,23 ,76 ,30.4 ,0.968 ,21 ,0 +8 ,120 ,78 ,0 ,0 ,25.0 ,0.409 ,64 ,0 +12 ,84 ,72 ,31 ,0 ,29.7 ,0.297 ,46 ,1 +0 ,139 ,62 ,17 ,210 ,22.1 ,0.207 ,21 ,0 +9 ,91 ,68 ,0 ,0 ,24.2 ,0.200 ,58 ,0 +2 ,91 ,62 ,0 ,0 ,27.3 ,0.525 ,22 ,0 +3 ,99 ,54 ,19 ,86 ,25.6 ,0.154 ,24 ,0 +3 ,163 ,70 ,18 ,105 ,31.6 ,0.268 ,28 ,1 +9 ,145 ,88 ,34 ,165 ,30.3 ,0.771 ,53 ,1 +7 ,125 ,86 ,0 ,0 ,37.6 ,0.304 ,51 ,0 +13 ,76 ,60 ,0 ,0 ,32.8 ,0.180 ,41 ,0 +6 ,129 ,90 ,7 ,326 ,19.6 ,0.582 ,60 ,0 +2 ,68 ,70 ,32 ,66 ,25.0 ,0.187 ,25 ,0 +3 ,124 ,80 ,33 ,130 ,33.2 ,0.305 ,26 ,0 +6 ,114 ,0 ,0 ,0 ,0.0 ,0.189 ,26 ,0 +9 ,130 ,70 ,0 ,0 ,34.2 ,0.652 ,45 ,1 +3 ,125 ,58 ,0 ,0 ,31.6 ,0.151 ,24 ,0 +3 ,87 ,60 ,18 ,0 ,21.8 ,0.444 ,21 ,0 +1 ,97 ,64 ,19 ,82 ,18.2 ,0.299 ,21 ,0 +3 ,116 ,74 ,15 ,105 ,26.3 ,0.107 ,24 ,0 +0 ,117 ,66 ,31 ,188 ,30.8 ,0.493 ,22 ,0 +0 ,111 ,65 ,0 ,0 ,24.6 ,0.660 ,31 ,0 +2 ,122 ,60 ,18 ,106 ,29.8 ,0.717 ,22 ,0 +0 ,107 ,76 ,0 ,0 ,45.3 ,0.686 ,24 ,0 +1 ,86 ,66 ,52 ,65 ,41.3 ,0.917 ,29 ,0 +6 ,91 ,0 ,0 ,0 ,29.8 ,0.501 ,31 ,0 +1 ,77 ,56 ,30 ,56 ,33.3 ,1.251 ,24 ,0 +4 ,132 ,0 ,0 ,0 ,32.9 ,0.302 ,23 ,1 +0 ,105 ,90 ,0 ,0 ,29.6 ,0.197 ,46 ,0 +0 ,57 ,60 ,0 ,0 ,21.7 ,0.735 ,67 ,0 +0 ,127 ,80 ,37 ,210 ,36.3 ,0.804 ,23 ,0 +3 ,129 ,92 ,49 ,155 ,36.4 ,0.968 ,32 ,1 +8 ,100 ,74 ,40 ,215 ,39.4 ,0.661 ,43 ,1 +3 ,128 ,72 ,25 ,190 ,32.4 ,0.549 ,27 ,1 +10 ,90 ,85 ,32 ,0 ,34.9 ,0.825 ,56 ,1 +4 ,84 ,90 ,23 ,56 ,39.5 ,0.159 ,25 ,0 +1 ,88 ,78 ,29 ,76 ,32.0 ,0.365 ,29 ,0 +8 ,186 ,90 ,35 ,225 ,34.5 ,0.423 ,37 ,1 +5 ,187 ,76 ,27 ,207 ,43.6 ,1.034 ,53 ,1 +4 ,131 ,68 ,21 ,166 ,33.1 ,0.160 ,28 ,0 +1 ,164 ,82 ,43 ,67 ,32.8 ,0.341 ,50 ,0 +4 ,189 ,110 ,31 ,0 ,28.5 ,0.680 ,37 ,0 +1 ,116 ,70 ,28 ,0 ,27.4 ,0.204 ,21 ,0 +3 ,84 ,68 ,30 ,106 ,31.9 ,0.591 ,25 ,0 +6 ,114 ,88 ,0 ,0 ,27.8 ,0.247 ,66 ,0 +1 ,88 ,62 ,24 ,44 ,29.9 ,0.422 ,23 ,0 +1 ,84 ,64 ,23 ,115 ,36.9 ,0.471 ,28 ,0 +7 ,124 ,70 ,33 ,215 ,25.5 ,0.161 ,37 ,0 +1 ,97 ,70 ,40 ,0 ,38.1 ,0.218 ,30 ,0 +8 ,110 ,76 ,0 ,0 ,27.8 ,0.237 ,58 ,0 +11 ,103 ,68 ,40 ,0 ,46.2 ,0.126 ,42 ,0 +11 ,85 ,74 ,0 ,0 ,30.1 ,0.300 ,35 ,0 +6 ,125 ,76 ,0 ,0 ,33.8 ,0.121 ,54 ,1 +0 ,198 ,66 ,32 ,274 ,41.3 ,0.502 ,28 ,1 +1 ,87 ,68 ,34 ,77 ,37.6 ,0.401 ,24 ,0 +6 ,99 ,60 ,19 ,54 ,26.9 ,0.497 ,32 ,0 +0 ,91 ,80 ,0 ,0 ,32.4 ,0.601 ,27 ,0 +2 ,95 ,54 ,14 ,88 ,26.1 ,0.748 ,22 ,0 +1 ,99 ,72 ,30 ,18 ,38.6 ,0.412 ,21 ,0 +6 ,92 ,62 ,32 ,126 ,32.0 ,0.085 ,46 ,0 +4 ,154 ,72 ,29 ,126 ,31.3 ,0.338 ,37 ,0 +0 ,121 ,66 ,30 ,165 ,34.3 ,0.203 ,33 ,1 +3 ,78 ,70 ,0 ,0 ,32.5 ,0.270 ,39 ,0 +2 ,130 ,96 ,0 ,0 ,22.6 ,0.268 ,21 ,0 +3 ,111 ,58 ,31 ,44 ,29.5 ,0.430 ,22 ,0 +2 ,98 ,60 ,17 ,120 ,34.7 ,0.198 ,22 ,0 +1 ,143 ,86 ,30 ,330 ,30.1 ,0.892 ,23 ,0 +1 ,119 ,44 ,47 ,63 ,35.5 ,0.280 ,25 ,0 +6 ,108 ,44 ,20 ,130 ,24.0 ,0.813 ,35 ,0 +2 ,118 ,80 ,0 ,0 ,42.9 ,0.693 ,21 ,1 +10 ,133 ,68 ,0 ,0 ,27.0 ,0.245 ,36 ,0 +2 ,197 ,70 ,99 ,0 ,34.7 ,0.575 ,62 ,1 +0 ,151 ,90 ,46 ,0 ,42.1 ,0.371 ,21 ,1 +6 ,109 ,60 ,27 ,0 ,25.0 ,0.206 ,27 ,0 +12 ,121 ,78 ,17 ,0 ,26.5 ,0.259 ,62 ,0 +8 ,100 ,76 ,0 ,0 ,38.7 ,0.190 ,42 ,0 +8 ,124 ,76 ,24 ,600 ,28.7 ,0.687 ,52 ,1 +1 ,93 ,56 ,11 ,0 ,22.5 ,0.417 ,22 ,0 +8 ,143 ,66 ,0 ,0 ,34.9 ,0.129 ,41 ,1 +6 ,103 ,66 ,0 ,0 ,24.3 ,0.249 ,29 ,0 +3 ,176 ,86 ,27 ,156 ,33.3 ,1.154 ,52 ,1 +0 ,73 ,0 ,0 ,0 ,21.1 ,0.342 ,25 ,0 +11 ,111 ,84 ,40 ,0 ,46.8 ,0.925 ,45 ,1 +2 ,112 ,78 ,50 ,140 ,39.4 
,0.175 ,24 ,0 +3 ,132 ,80 ,0 ,0 ,34.4 ,0.402 ,44 ,1 +2 ,82 ,52 ,22 ,115 ,28.5 ,1.699 ,25 ,0 +6 ,123 ,72 ,45 ,230 ,33.6 ,0.733 ,34 ,0 +0 ,188 ,82 ,14 ,185 ,32.0 ,0.682 ,22 ,1 +0 ,67 ,76 ,0 ,0 ,45.3 ,0.194 ,46 ,0 +1 ,89 ,24 ,19 ,25 ,27.8 ,0.559 ,21 ,0 +1 ,173 ,74 ,0 ,0 ,36.8 ,0.088 ,38 ,1 +1 ,109 ,38 ,18 ,120 ,23.1 ,0.407 ,26 ,0 +1 ,108 ,88 ,19 ,0 ,27.1 ,0.400 ,24 ,0 +6 ,96 ,0 ,0 ,0 ,23.7 ,0.190 ,28 ,0 +1 ,124 ,74 ,36 ,0 ,27.8 ,0.100 ,30 ,0 +7 ,150 ,78 ,29 ,126 ,35.2 ,0.692 ,54 ,1 +4 ,183 ,0 ,0 ,0 ,28.4 ,0.212 ,36 ,1 +1 ,124 ,60 ,32 ,0 ,35.8 ,0.514 ,21 ,0 +1 ,181 ,78 ,42 ,293 ,40.0 ,1.258 ,22 ,1 +1 ,92 ,62 ,25 ,41 ,19.5 ,0.482 ,25 ,0 +0 ,152 ,82 ,39 ,272 ,41.5 ,0.270 ,27 ,0 +1 ,111 ,62 ,13 ,182 ,24.0 ,0.138 ,23 ,0 +3 ,106 ,54 ,21 ,158 ,30.9 ,0.292 ,24 ,0 +3 ,174 ,58 ,22 ,194 ,32.9 ,0.593 ,36 ,1 +7 ,168 ,88 ,42 ,321 ,38.2 ,0.787 ,40 ,1 +6 ,105 ,80 ,28 ,0 ,32.5 ,0.878 ,26 ,0 +11 ,138 ,74 ,26 ,144 ,36.1 ,0.557 ,50 ,1 +3 ,106 ,72 ,0 ,0 ,25.8 ,0.207 ,27 ,0 +6 ,117 ,96 ,0 ,0 ,28.7 ,0.157 ,30 ,0 +2 ,68 ,62 ,13 ,15 ,20.1 ,0.257 ,23 ,0 +9 ,112 ,82 ,24 ,0 ,28.2 ,1.282 ,50 ,1 +0 ,119 ,0 ,0 ,0 ,32.4 ,0.141 ,24 ,1 +2 ,112 ,86 ,42 ,160 ,38.4 ,0.246 ,28 ,0 +2 ,92 ,76 ,20 ,0 ,24.2 ,1.698 ,28 ,0 +6 ,183 ,94 ,0 ,0 ,40.8 ,1.461 ,45 ,0 +0 ,94 ,70 ,27 ,115 ,43.5 ,0.347 ,21 ,0 +2 ,108 ,64 ,0 ,0 ,30.8 ,0.158 ,21 ,0 +4 ,90 ,88 ,47 ,54 ,37.7 ,0.362 ,29 ,0 +0 ,125 ,68 ,0 ,0 ,24.7 ,0.206 ,21 ,0 +0 ,132 ,78 ,0 ,0 ,32.4 ,0.393 ,21 ,0 +5 ,128 ,80 ,0 ,0 ,34.6 ,0.144 ,45 ,0 +4 ,94 ,65 ,22 ,0 ,24.7 ,0.148 ,21 ,0 +7 ,114 ,64 ,0 ,0 ,27.4 ,0.732 ,34 ,1 +0 ,102 ,78 ,40 ,90 ,34.5 ,0.238 ,24 ,0 +2 ,111 ,60 ,0 ,0 ,26.2 ,0.343 ,23 ,0 +1 ,128 ,82 ,17 ,183 ,27.5 ,0.115 ,22 ,0 +10 ,92 ,62 ,0 ,0 ,25.9 ,0.167 ,31 ,0 +13 ,104 ,72 ,0 ,0 ,31.2 ,0.465 ,38 ,1 +5 ,104 ,74 ,0 ,0 ,28.8 ,0.153 ,48 ,0 +2 ,94 ,76 ,18 ,66 ,31.6 ,0.649 ,23 ,0 +7 ,97 ,76 ,32 ,91 ,40.9 ,0.871 ,32 ,1 +1 ,100 ,74 ,12 ,46 ,19.5 ,0.149 ,28 ,0 +0 ,102 ,86 ,17 ,105 ,29.3 ,0.695 ,27 ,0 +4 ,128 ,70 ,0 ,0 ,34.3 ,0.303 ,24 ,0 +6 ,147 ,80 ,0 ,0 ,29.5 ,0.178 ,50 ,1 +4 ,90 ,0 ,0 ,0 ,28.0 ,0.610 ,31 ,0 +3 ,103 ,72 ,30 ,152 ,27.6 ,0.730 ,27 ,0 +2 ,157 ,74 ,35 ,440 ,39.4 ,0.134 ,30 ,0 +1 ,167 ,74 ,17 ,144 ,23.4 ,0.447 ,33 ,1 +0 ,179 ,50 ,36 ,159 ,37.8 ,0.455 ,22 ,1 +11 ,136 ,84 ,35 ,130 ,28.3 ,0.260 ,42 ,1 +0 ,107 ,60 ,25 ,0 ,26.4 ,0.133 ,23 ,0 +1 ,91 ,54 ,25 ,100 ,25.2 ,0.234 ,23 ,0 +1 ,117 ,60 ,23 ,106 ,33.8 ,0.466 ,27 ,0 +5 ,123 ,74 ,40 ,77 ,34.1 ,0.269 ,28 ,0 +2 ,120 ,54 ,0 ,0 ,26.8 ,0.455 ,27 ,0 +1 ,106 ,70 ,28 ,135 ,34.2 ,0.142 ,22 ,0 +2 ,155 ,52 ,27 ,540 ,38.7 ,0.240 ,25 ,1 +2 ,101 ,58 ,35 ,90 ,21.8 ,0.155 ,22 ,0 +1 ,120 ,80 ,48 ,200 ,38.9 ,1.162 ,41 ,0 +11 ,127 ,106 ,0 ,0 ,39.0 ,0.190 ,51 ,0 +3 ,80 ,82 ,31 ,70 ,34.2 ,1.292 ,27 ,1 +10 ,162 ,84 ,0 ,0 ,27.7 ,0.182 ,54 ,0 +1 ,199 ,76 ,43 ,0 ,42.9 ,1.394 ,22 ,1 +8 ,167 ,106 ,46 ,231 ,37.6 ,0.165 ,43 ,1 +9 ,145 ,80 ,46 ,130 ,37.9 ,0.637 ,40 ,1 +6 ,115 ,60 ,39 ,0 ,33.7 ,0.245 ,40 ,1 +1 ,112 ,80 ,45 ,132 ,34.8 ,0.217 ,24 ,0 +4 ,145 ,82 ,18 ,0 ,32.5 ,0.235 ,70 ,1 +10 ,111 ,70 ,27 ,0 ,27.5 ,0.141 ,40 ,1 +6 ,98 ,58 ,33 ,190 ,34.0 ,0.430 ,43 ,0 +9 ,154 ,78 ,30 ,100 ,30.9 ,0.164 ,45 ,0 +6 ,165 ,68 ,26 ,168 ,33.6 ,0.631 ,49 ,0 +1 ,99 ,58 ,10 ,0 ,25.4 ,0.551 ,21 ,0 +10 ,68 ,106 ,23 ,49 ,35.5 ,0.285 ,47 ,0 +3 ,123 ,100 ,35 ,240 ,57.3 ,0.880 ,22 ,0 +8 ,91 ,82 ,0 ,0 ,35.6 ,0.587 ,68 ,0 +6 ,195 ,70 ,0 ,0 ,30.9 ,0.328 ,31 ,1 +9 ,156 ,86 ,0 ,0 ,24.8 ,0.230 ,53 ,1 +0 ,93 ,60 ,0 ,0 ,35.3 ,0.263 ,25 ,0 +3 ,121 ,52 ,0 ,0 ,36.0 ,0.127 ,25 ,1 +2 ,101 ,58 ,17 ,265 ,24.2 ,0.614 ,23 ,0 +2 ,56 ,56 ,28 ,45 ,24.2 ,0.332 ,22 ,0 +0 ,162 ,76 ,36 ,0 ,49.6 ,0.364 
,26 ,1 +0 ,95 ,64 ,39 ,105 ,44.6 ,0.366 ,22 ,0 +4 ,125 ,80 ,0 ,0 ,32.3 ,0.536 ,27 ,1 +5 ,136 ,82 ,0 ,0 ,0.0 ,0.640 ,69 ,0 +2 ,129 ,74 ,26 ,205 ,33.2 ,0.591 ,25 ,0 +3 ,130 ,64 ,0 ,0 ,23.1 ,0.314 ,22 ,0 +1 ,107 ,50 ,19 ,0 ,28.3 ,0.181 ,29 ,0 +1 ,140 ,74 ,26 ,180 ,24.1 ,0.828 ,23 ,0 +1 ,144 ,82 ,46 ,180 ,46.1 ,0.335 ,46 ,1 +8 ,107 ,80 ,0 ,0 ,24.6 ,0.856 ,34 ,0 +13 ,158 ,114 ,0 ,0 ,42.3 ,0.257 ,44 ,1 +2 ,121 ,70 ,32 ,95 ,39.1 ,0.886 ,23 ,0 +7 ,129 ,68 ,49 ,125 ,38.5 ,0.439 ,43 ,1 +2 ,90 ,60 ,0 ,0 ,23.5 ,0.191 ,25 ,0 +7 ,142 ,90 ,24 ,480 ,30.4 ,0.128 ,43 ,1 +3 ,169 ,74 ,19 ,125 ,29.9 ,0.268 ,31 ,1 +0 ,99 ,0 ,0 ,0 ,25.0 ,0.253 ,22 ,0 +4 ,127 ,88 ,11 ,155 ,34.5 ,0.598 ,28 ,0 +4 ,118 ,70 ,0 ,0 ,44.5 ,0.904 ,26 ,0 +2 ,122 ,76 ,27 ,200 ,35.9 ,0.483 ,26 ,0 +6 ,125 ,78 ,31 ,0 ,27.6 ,0.565 ,49 ,1 +1 ,168 ,88 ,29 ,0 ,35.0 ,0.905 ,52 ,1 +2 ,129 ,0 ,0 ,0 ,38.5 ,0.304 ,41 ,0 +4 ,110 ,76 ,20 ,100 ,28.4 ,0.118 ,27 ,0 +6 ,80 ,80 ,36 ,0 ,39.8 ,0.177 ,28 ,0 +10 ,115 ,0 ,0 ,0 ,0.0 ,0.261 ,30 ,1 +2 ,127 ,46 ,21 ,335 ,34.4 ,0.176 ,22 ,0 +9 ,164 ,78 ,0 ,0 ,32.8 ,0.148 ,45 ,1 +2 ,93 ,64 ,32 ,160 ,38.0 ,0.674 ,23 ,1 +3 ,158 ,64 ,13 ,387 ,31.2 ,0.295 ,24 ,0 +5 ,126 ,78 ,27 ,22 ,29.6 ,0.439 ,40 ,0 +10 ,129 ,62 ,36 ,0 ,41.2 ,0.441 ,38 ,1 +0 ,134 ,58 ,20 ,291 ,26.4 ,0.352 ,21 ,0 +3 ,102 ,74 ,0 ,0 ,29.5 ,0.121 ,32 ,0 +7 ,187 ,50 ,33 ,392 ,33.9 ,0.826 ,34 ,1 +3 ,173 ,78 ,39 ,185 ,33.8 ,0.970 ,31 ,1 +10 ,94 ,72 ,18 ,0 ,23.1 ,0.595 ,56 ,0 +1 ,108 ,60 ,46 ,178 ,35.5 ,0.415 ,24 ,0 +5 ,97 ,76 ,27 ,0 ,35.6 ,0.378 ,52 ,1 +4 ,83 ,86 ,19 ,0 ,29.3 ,0.317 ,34 ,0 +1 ,114 ,66 ,36 ,200 ,38.1 ,0.289 ,21 ,0 +1 ,149 ,68 ,29 ,127 ,29.3 ,0.349 ,42 ,1 +5 ,117 ,86 ,30 ,105 ,39.1 ,0.251 ,42 ,0 +1 ,111 ,94 ,0 ,0 ,32.8 ,0.265 ,45 ,0 +4 ,112 ,78 ,40 ,0 ,39.4 ,0.236 ,38 ,0 +1 ,116 ,78 ,29 ,180 ,36.1 ,0.496 ,25 ,0 +0 ,141 ,84 ,26 ,0 ,32.4 ,0.433 ,22 ,0 +2 ,175 ,88 ,0 ,0 ,22.9 ,0.326 ,22 ,0 +2 ,92 ,52 ,0 ,0 ,30.1 ,0.141 ,22 ,0 +3 ,130 ,78 ,23 ,79 ,28.4 ,0.323 ,34 ,1 +8 ,120 ,86 ,0 ,0 ,28.4 ,0.259 ,22 ,1 +2 ,174 ,88 ,37 ,120 ,44.5 ,0.646 ,24 ,1 +2 ,106 ,56 ,27 ,165 ,29.0 ,0.426 ,22 ,0 +2 ,105 ,75 ,0 ,0 ,23.3 ,0.560 ,53 ,0 +4 ,95 ,60 ,32 ,0 ,35.4 ,0.284 ,28 ,0 +0 ,126 ,86 ,27 ,120 ,27.4 ,0.515 ,21 ,0 +8 ,65 ,72 ,23 ,0 ,32.0 ,0.600 ,42 ,0 +2 ,99 ,60 ,17 ,160 ,36.6 ,0.453 ,21 ,0 +1 ,102 ,74 ,0 ,0 ,39.5 ,0.293 ,42 ,1 +11 ,120 ,80 ,37 ,150 ,42.3 ,0.785 ,48 ,1 +3 ,102 ,44 ,20 ,94 ,30.8 ,0.400 ,26 ,0 +1 ,109 ,58 ,18 ,116 ,28.5 ,0.219 ,22 ,0 +9 ,140 ,94 ,0 ,0 ,32.7 ,0.734 ,45 ,1 +13 ,153 ,88 ,37 ,140 ,40.6 ,1.174 ,39 ,0 +12 ,100 ,84 ,33 ,105 ,30.0 ,0.488 ,46 ,0 +1 ,147 ,94 ,41 ,0 ,49.3 ,0.358 ,27 ,1 +1 ,81 ,74 ,41 ,57 ,46.3 ,1.096 ,32 ,0 +3 ,187 ,70 ,22 ,200 ,36.4 ,0.408 ,36 ,1 +6 ,162 ,62 ,0 ,0 ,24.3 ,0.178 ,50 ,1 +4 ,136 ,70 ,0 ,0 ,31.2 ,1.182 ,22 ,1 +1 ,121 ,78 ,39 ,74 ,39.0 ,0.261 ,28 ,0 +3 ,108 ,62 ,24 ,0 ,26.0 ,0.223 ,25 ,0 +0 ,181 ,88 ,44 ,510 ,43.3 ,0.222 ,26 ,1 +8 ,154 ,78 ,32 ,0 ,32.4 ,0.443 ,45 ,1 +1 ,128 ,88 ,39 ,110 ,36.5 ,1.057 ,37 ,1 +7 ,137 ,90 ,41 ,0 ,32.0 ,0.391 ,39 ,0 +0 ,123 ,72 ,0 ,0 ,36.3 ,0.258 ,52 ,1 +1 ,106 ,76 ,0 ,0 ,37.5 ,0.197 ,26 ,0 +6 ,190 ,92 ,0 ,0 ,35.5 ,0.278 ,66 ,1 +2 ,88 ,58 ,26 ,16 ,28.4 ,0.766 ,22 ,0 +9 ,170 ,74 ,31 ,0 ,44.0 ,0.403 ,43 ,1 +9 ,89 ,62 ,0 ,0 ,22.5 ,0.142 ,33 ,0 +10 ,101 ,76 ,48 ,180 ,32.9 ,0.171 ,63 ,0 +2 ,122 ,70 ,27 ,0 ,36.8 ,0.340 ,27 ,0 +5 ,121 ,72 ,23 ,112 ,26.2 ,0.245 ,30 ,0 +1 ,126 ,60 ,0 ,0 ,30.1 ,0.349 ,47 ,1 +1 ,93 ,70 ,31 ,0 ,30.4 ,0.315 ,23 ,0 diff --git a/e2e/decision_tree_classifier_test.dart b/e2e/decision_tree_classifier_test.dart new file mode 100644 index 00000000..285314c5 --- /dev/null +++ 
b/e2e/decision_tree_classifier_test.dart @@ -0,0 +1,73 @@ +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; +import 'package:ml_linalg/dtype.dart'; +import 'package:ml_linalg/vector.dart'; +import 'package:ml_preprocessing/ml_preprocessing.dart'; +import 'package:test/test.dart'; + +Future evaluateClassifier(MetricType metric, DType dtype) async { + final samples = (await fromCsv('e2e/datasets/iris.csv')) + .shuffle() + .dropSeries(seriesNames: ['Id']); + final pipeline = Pipeline(samples, [ + encodeAsIntegerLabels( + featureNames: ['Species'], + ), + ]); + final numberOfFolds = 5; + final processed = pipeline.process(samples); + final validator = CrossValidator.kFold( + processed, + numberOfFolds: numberOfFolds, + ); + final createClassifier = (DataFrame trainSamples) => + DecisionTreeClassifier( + trainSamples, + 'Species', + minError: 0.3, + minSamplesCount: 5, + maxDepth: 4, + dtype: dtype, + ); + + return validator.evaluate( + createClassifier, + metric, + ); +} + +void main() async { + group('DecisionTreeClassifier', () { + test('should return adequate score on iris dataset using accuracy ' + 'metric, dtype=DType.float32', () async { + final scores = await evaluateClassifier( + MetricType.accuracy, DType.float32); + + expect(scores.mean(), closeTo(0.92, 3e-2)); + }); + + test('should return adequate score on iris dataset using accuracy ' + 'metric, dtype=DType.float64', () async { + final scores = await evaluateClassifier( + MetricType.accuracy, DType.float64); + + expect(scores.mean(), closeTo(0.92, 3e-2)); + }); + + test('should return adequate score on iris dataset using precision ' + 'metric, dtype=DType.float32', () async { + final scores = await evaluateClassifier( + MetricType.precision, DType.float32); + + expect(scores.mean(), closeTo(0.92, 3e-2)); + }); + + test('should return adequate score on iris dataset using precision ' + 'metric, dtype=DType.float64', () async { + final scores = await evaluateClassifier( + MetricType.precision, DType.float64); + + expect(scores.mean(), closeTo(0.62, 5e-2)); + }); + }); +} diff --git a/e2e/knn_classifier_test.dart b/e2e/knn_classifier_test.dart new file mode 100644 index 00000000..392d582d --- /dev/null +++ b/e2e/knn_classifier_test.dart @@ -0,0 +1,73 @@ +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; +import 'package:ml_linalg/dtype.dart'; +import 'package:ml_linalg/vector.dart'; +import 'package:ml_preprocessing/ml_preprocessing.dart'; +import 'package:test/test.dart'; + +Future evaluateKnnClassifier(MetricType metric, DType dtype) async { + final samples = (await fromCsv('e2e/datasets/iris.csv')) + .shuffle() + .dropSeries(seriesNames: ['Id']); + final targetName = 'Species'; + final pipeline = Pipeline(samples, [ + encodeAsIntegerLabels( + featureNames: [targetName], + ), + ]); + final processed = pipeline.process(samples); + final numberOfFolds = 7; + final numberOfNeighbours = 5; + final validator = CrossValidator.kFold( + processed, + numberOfFolds: numberOfFolds, + ); + final createClassifier = (DataFrame trainSamples) => + KnnClassifier( + trainSamples, + targetName, + numberOfNeighbours, + dtype: dtype, + ); + + return validator.evaluate( + createClassifier, + metric, + ); +} + +void main() async { + group('KnnClassifier', () { + test('should return adequate score on iris dataset using accuracy ' + 'metric, dtype=DType.float32', () async { + final scores = await evaluateKnnClassifier(MetricType.accuracy, + DType.float32); + + 
expect(scores.mean(), closeTo(0.95, 3e-2)); + }); + + test('should return adequate score on iris dataset using accuracy ' + 'metric, dtype=DType.float64', () async { + final scores = await evaluateKnnClassifier(MetricType.accuracy, + DType.float64); + + expect(scores.mean(), closeTo(0.95, 4e-2)); + }); + + test('should return adequate score on iris dataset using precision ' + 'metric, dtype=DType.float32', () async { + final scores = await evaluateKnnClassifier(MetricType.precision, + DType.float32); + + expect(scores.mean(), closeTo(0.95, 4e-2)); + }); + + test('should return adequate score on iris dataset using precision ' + 'metric, dtype=DType.float64', () async { + final scores = await evaluateKnnClassifier(MetricType.precision, + DType.float64); + + expect(scores.mean(), closeTo(0.65, 5e-2)); + }); + }); +} diff --git a/e2e/knn_regressor_test.dart b/e2e/knn_regressor_test.dart new file mode 100644 index 00000000..65b8bec0 --- /dev/null +++ b/e2e/knn_regressor_test.dart @@ -0,0 +1,42 @@ +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; +import 'package:ml_linalg/dtype.dart'; +import 'package:ml_linalg/vector.dart'; +import 'package:ml_preprocessing/ml_preprocessing.dart'; +import 'package:test/test.dart'; + +Future evaluateKnnRegressor(MetricType metricType, DType dtype) async { + final data = (await fromCsv('e2e/datasets/housing.csv', + headerExists: false, + columnDelimiter: ' ', + )).shuffle(); + final normalized = Normalizer().process(data); + final folds = 5; + final targetName = 'col_13'; + final validator = CrossValidator.kFold( + normalized, + numberOfFolds: folds, + dtype: dtype, + ); + + return validator.evaluate((trainSamples) => + KnnRegressor(trainSamples, targetName, folds), metricType); +} + +void main() { + group('KnnRegressor', () { + test('should return adequate score on boston housing dataset using mape ' + 'metric, dtype=DType.float32', () async { + final scores = await evaluateKnnRegressor(MetricType.mape, DType.float32); + + expect(scores.mean(), closeTo(7, 3)); + }); + + test('should return adequate score on boston housing dataset using mape ' + 'metric, dtype=DType.float64', () async { + final scores = await evaluateKnnRegressor(MetricType.mape, DType.float64); + + expect(scores.mean(), closeTo(7, 3)); + }); + }); +} diff --git a/e2e/lasso_regressor.dart b/e2e/lasso_regressor.dart new file mode 100644 index 00000000..cd0211c4 --- /dev/null +++ b/e2e/lasso_regressor.dart @@ -0,0 +1,47 @@ +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; +import 'package:ml_linalg/dtype.dart'; +import 'package:ml_linalg/vector.dart'; +import 'package:test/test.dart'; + +Future evaluateLassoRegressor(MetricType metricType, DType dtype) async { + final samples = (await fromCsv('e2e/datasets/advertising.csv')) + .shuffle() + .dropSeries(seriesNames: ['Num']); + final targetName = 'Sales'; + final validator = CrossValidator.kFold( + samples, + numberOfFolds: 5, + ); + + return validator.evaluate((trainSamples) => + LinearRegressor( + trainSamples, + targetName, + optimizerType: LinearOptimizerType.coordinate, + iterationsLimit: 100, + lambda: 50000.0, + dtype: dtype, + ), + metricType); +} + +void main() { + group('LinearRegressor', () { + test('should return adequate error on mape metric, ' + 'dtype=DType.float32', () async { + final scores = await evaluateLassoRegressor(MetricType.mape, + DType.float32); + + expect(scores.mean(), closeTo(36, 1)); + }); + + test('should return adequate error on mape 
metric, ' + 'dtype=DType.float64', () async { + final scores = await evaluateLassoRegressor(MetricType.mape, + DType.float64); + + expect(scores.mean(), closeTo(36, 2)); + }); + }); +} diff --git a/e2e/linear_regressor_test.dart b/e2e/linear_regressor_test.dart new file mode 100644 index 00000000..20d97415 --- /dev/null +++ b/e2e/linear_regressor_test.dart @@ -0,0 +1,50 @@ +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; +import 'package:ml_linalg/dtype.dart'; +import 'package:ml_linalg/vector.dart'; +import 'package:test/test.dart'; + +Future evaluateLinearRegressor(MetricType metricType, + DType dtype) async { + final samples = (await fromCsv('e2e/datasets/housing.csv', + headerExists: false, + columnDelimiter: ' ')).shuffle(); + final folds = 5; + final targetName = 'col_13'; + final validator = CrossValidator.kFold( + samples, + numberOfFolds: folds, + ); + final createRegressor = (DataFrame trainSamples) => + LinearRegressor( + trainSamples, + targetName, + optimizerType: LinearOptimizerType.gradient, + initialLearningRate: 0.00000385, + randomSeed: 2, + learningRateType: LearningRateType.decreasingAdaptive, + dtype: dtype, + ); + + return validator.evaluate(createRegressor, metricType); +} + +void main() async { + group('LinearRegressor', () { + test('should return adequate error on mape metric, ' + 'dtype=DType.float32', () async { + expect( + (await evaluateLinearRegressor(MetricType.mape, DType.float32)).mean(), + closeTo(12, 4), + ); + }); + + test('should return adequate error on mape metric, ' + 'dtype=DType.float64', () async { + expect( + (await evaluateLinearRegressor(MetricType.mape, DType.float64)).mean(), + closeTo(21, 5), + ); + }); + }); +} diff --git a/e2e/logistic_regressor_test.dart b/e2e/logistic_regressor_test.dart new file mode 100644 index 00000000..ec7b512e --- /dev/null +++ b/e2e/logistic_regressor_test.dart @@ -0,0 +1,69 @@ +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; +import 'package:ml_linalg/dtype.dart'; +import 'package:ml_linalg/vector.dart'; +import 'package:test/test.dart'; + +Future evaluateLogisticRegressor(MetricType metric, DType dtype) async { + final samples = (await fromCsv('e2e/datasets/pima_indians_diabetes_database.csv')) + .shuffle(); + final numberOfFolds = 5; + final targetNames = ['class variable (0 or 1)']; + final validator = CrossValidator.kFold( + samples, + numberOfFolds: numberOfFolds, + ); + final createClassifier = (DataFrame trainSamples) => + LogisticRegressor( + trainSamples, + targetNames.first, + optimizerType: LinearOptimizerType.gradient, + iterationsLimit: 200, + initialLearningRate: 1e-5, + learningRateType: LearningRateType.decreasingAdaptive, + batchSize: trainSamples.rows.length, + probabilityThreshold: 0.5, + dtype: dtype, + ); + + return validator.evaluate( + createClassifier, + metric, + ); +} + +Future main() async { + group('LogisticRegressor', () { + test('should return adequate score on pima indians diabetes dataset using ' + 'accuracy metric, dtype=DType.float32', () async { + final scores = await evaluateLogisticRegressor(MetricType.accuracy, + DType.float32); + + expect(scores.mean(), closeTo(0.67, 2e-2)); + }); + + test('should return adequate score on pima indians diabetes dataset using ' + 'precision metric, dtype=DType.float64', () async { + final scores = await evaluateLogisticRegressor(MetricType.accuracy, + DType.float32); + + expect(scores.mean(), closeTo(0.67, 2e-2)); + }); + + test('should return adequate score on 
pima indians diabetes dataset using ' + 'precision metric, dtype=DType.float32', () async { + final scores = await evaluateLogisticRegressor(MetricType.precision, + DType.float32); + + expect(scores.mean(), closeTo(0.64, 2e-2)); + }); + + test('should return adequate score on pima indians diabetes dataset using ' + 'precision metric, dtype=DType.float64', () async { + final scores = await evaluateLogisticRegressor(MetricType.precision, + DType.float32); + + expect(scores.mean(), closeTo(0.65, 4e-2)); + }); + }); +} diff --git a/e2e/softmax_regressor_test.dart b/e2e/softmax_regressor_test.dart new file mode 100644 index 00000000..3eb86e8b --- /dev/null +++ b/e2e/softmax_regressor_test.dart @@ -0,0 +1,76 @@ +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; +import 'package:ml_linalg/dtype.dart'; +import 'package:ml_linalg/vector.dart'; +import 'package:ml_preprocessing/ml_preprocessing.dart'; +import 'package:test/test.dart'; + +Future evaluateSoftmaxRegressor(MetricType metricType, + DType dtype) async { + final samples = (await fromCsv('e2e/datasets/iris.csv')) + .shuffle() + .dropSeries(seriesNames: ['Id']); + final pipeline = Pipeline(samples, [ + encodeAsOneHotLabels( + featureNames: ['Species'], + ), + ]); + final classNames = ['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']; + final processed = pipeline.process(samples); + final numberOfFolds = 5; + final validator = CrossValidator.kFold( + processed, + numberOfFolds: numberOfFolds, + ); + final predictorFactory = (DataFrame trainingSamples) => + SoftmaxRegressor( + trainingSamples, + classNames, + initialLearningRate: 0.035, + iterationsLimit: 5000, + minCoefficientsUpdate: null, + learningRateType: LearningRateType.constant, + dtype: dtype, + ); + + return validator.evaluate( + predictorFactory, + metricType, + ); +} + +Future main() async { + group('SoftmaxRegressor', () { + test('should return adequate score on iris dataset using accuracy ' + 'metric, dtype=DType.float32', () async { + final scores = await evaluateSoftmaxRegressor(MetricType.accuracy, + DType.float32); + + expect(scores.mean(), closeTo(0.92, 7e-2)); + }); + + test('should return adequate score on iris dataset using accuracy ' + 'metric, dtype=DType.float64', () async { + final scores = await evaluateSoftmaxRegressor(MetricType.accuracy, + DType.float64); + + expect(scores.mean(), closeTo(0.92, 7e-2)); + }); + + test('should return adequate score on iris dataset using precision ' + 'metric, dtype=DType.float32', () async { + final scores = await evaluateSoftmaxRegressor(MetricType.precision, + DType.float32); + + expect(scores.mean(), closeTo(0.92, 7e-2)); + }); + + test('should return adequate score on iris dataset using precision ' + 'metric, dtype=DType.float64', () async { + final scores = await evaluateSoftmaxRegressor(MetricType.precision, + DType.float64); + + expect(scores.mean(), closeTo(0.63, 3e-2)); + }); + }); +} diff --git a/lib/ml_algo.dart b/lib/ml_algo.dart index 16c2c218..3728bbf7 100644 --- a/lib/ml_algo.dart +++ b/lib/ml_algo.dart @@ -6,9 +6,7 @@ export 'package:ml_algo/src/knn_kernel/kernel_type.dart'; export 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; export 'package:ml_algo/src/linear_optimizer/linear_optimizer_type.dart'; export 'package:ml_algo/src/linear_optimizer/regularization_type.dart'; -export 'package:ml_algo/src/metric/classification/type.dart'; export 'package:ml_algo/src/metric/metric_type.dart'; -export 
'package:ml_algo/src/metric/regression/type.dart'; export 'package:ml_algo/src/model_selection/cross_validator/cross_validator.dart'; export 'package:ml_algo/src/model_selection/split_data.dart'; export 'package:ml_algo/src/regressor/knn_regressor/knn_regressor.dart'; diff --git a/lib/src/classifier/_helpers/create_log_likelihood_optimizer.dart b/lib/src/classifier/_helpers/create_log_likelihood_optimizer.dart index 0d14291b..9f9f46e2 100644 --- a/lib/src/classifier/_helpers/create_log_likelihood_optimizer.dart +++ b/lib/src/classifier/_helpers/create_log_likelihood_optimizer.dart @@ -1,6 +1,6 @@ import 'package:ml_algo/src/cost_function/cost_function_factory.dart'; import 'package:ml_algo/src/cost_function/cost_function_type.dart'; -import 'package:ml_algo/src/di/dependencies.dart'; +import 'package:ml_algo/src/di/injector.dart'; import 'package:ml_algo/src/helpers/add_intercept_if.dart'; import 'package:ml_algo/src/helpers/features_target_split.dart'; import 'package:ml_algo/src/helpers/normalize_class_labels.dart'; @@ -45,8 +45,8 @@ LinearOptimizer createLogLikelihoodOptimizer( .toList(); final points = splits[0].toMatrix(dtype); final labels = splits[1].toMatrix(dtype); - final optimizerFactory = dependencies.get(); - final costFunctionFactory = dependencies.get(); + final optimizerFactory = injector.get(); + final costFunctionFactory = injector.get(); final costFunction = costFunctionFactory.createByType( CostFunctionType.logLikelihood, linkFunction: linkFunction, diff --git a/lib/src/classifier/_mixins/assessable_classifier_mixin.dart b/lib/src/classifier/_mixins/assessable_classifier_mixin.dart new file mode 100644 index 00000000..9978fa59 --- /dev/null +++ b/lib/src/classifier/_mixins/assessable_classifier_mixin.dart @@ -0,0 +1,17 @@ +import 'package:ml_algo/src/classifier/classifier.dart'; +import 'package:ml_algo/src/di/injector.dart'; +import 'package:ml_algo/src/metric/metric_type.dart'; +import 'package:ml_algo/src/model_selection/assessable.dart'; +import 'package:ml_algo/src/model_selection/model_assessor/model_assessor.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; + +mixin AssessableClassifierMixin implements Assessable, Classifier { + @override + double assess( + DataFrame samples, + MetricType metricType, + ) => + injector + .get<ModelAssessor<Classifier>>() + .assess(this, metricType, samples); +} diff --git a/lib/src/classifier/_mixins/classification_metrics_mixin.dart b/lib/src/classifier/_mixins/classification_metrics_mixin.dart new file mode 100644 index 00000000..78fdde00 --- /dev/null +++ b/lib/src/classifier/_mixins/classification_metrics_mixin.dart @@ -0,0 +1,8 @@ +import 'package:ml_algo/src/metric/metric_type.dart'; + +mixin ClassificationMetricsMixin { + List<MetricType> get allowedMetrics => [ + MetricType.accuracy, + MetricType.precision, + ]; +} diff --git a/lib/src/classifier/classifier.dart b/lib/src/classifier/classifier.dart index b26a73ae..293ca602 100644 --- a/lib/src/classifier/classifier.dart +++ b/lib/src/classifier/classifier.dart @@ -7,4 +7,56 @@ abstract class Classifier extends Predictor { /// Returns predicted distribution of probabilities for each observation in /// the passed [testFeatures] DataFrame predictProbabilities(DataFrame testFeatures); + + /// A value used to encode the positive class. 
+ /// + /// Example: + /// + /// Given a positive class label equals 100 + /// + /// Given a negative class label equals -100 + /// + /// Given a dataset + /// + /// feature_1 | feature_2 | feature_3 | target class 1 | target class 2 | target class 3 + /// + /// 123 | 233 | 444 | 100 | -100 | -100 + /// + /// 333 | 100 | 101 | 100 | -100 | -100 + /// + /// 321 | 911 | 321 | -100 | 100 | -100 + /// + /// 221 | 987 | 222 | -100 | -100 | 100 + /// + /// 908 | 404 | 503 | -100 | 100 | -100 + /// + /// If a prediction algorithm encounters the value 100 in a target column, it will + /// interpret the value as a positive outcome for the corresponding class + num get positiveLabel; + + /// A value used to encode the negative class. + /// + /// Example: + /// + /// Given a positive class label equals 100 + /// + /// Given a negative class label equals -100 + /// + /// Given a dataset + /// + /// feature_1 | feature_2 | feature_3 | target class 1 | target class 2 | target class 3 + /// + /// 123 | 233 | 444 | 100 | -100 | -100 + /// + /// 333 | 100 | 101 | 100 | -100 | -100 + /// + /// 321 | 911 | 321 | -100 | 100 | -100 + /// + /// 221 | 987 | 222 | -100 | -100 | 100 + /// + /// 908 | 404 | 503 | -100 | 100 | -100 + /// + /// If a prediction algorithm encounters the value -100 in a target column, it will + /// interpret the value as a negative outcome for the corresponding class + num get negativeLabel; } diff --git a/lib/src/classifier/decision_tree_classifier/_helper/create_decision_tree_classifier.dart b/lib/src/classifier/decision_tree_classifier/_helper/create_decision_tree_classifier.dart index de45f1aa..38be67e8 100644 --- a/lib/src/classifier/decision_tree_classifier/_helper/create_decision_tree_classifier.dart +++ b/lib/src/classifier/decision_tree_classifier/_helper/create_decision_tree_classifier.dart @@ -1,10 +1,10 @@ +import 'package:ml_algo/src/classifier/decision_tree_classifier/_injector.dart'; import 'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_classifier.dart'; import 'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_classifier_factory.dart'; -import 'package:ml_algo/src/di/dependencies.dart'; import 'package:ml_algo/src/helpers/validate_train_data.dart'; import 'package:ml_algo/src/helpers/validate_tree_solver_max_depth.dart'; -import 'package:ml_algo/src/helpers/validate_tree_solver_min_samples_count.dart'; import 'package:ml_algo/src/helpers/validate_tree_solver_min_error.dart'; +import 'package:ml_algo/src/helpers/validate_tree_solver_min_samples_count.dart'; import 'package:ml_algo/src/tree_trainer/_helpers/create_decision_tree_trainer.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; @@ -26,7 +26,7 @@ DecisionTreeClassifier createDecisionTreeClassifier( minSamplesCount, maxDepth); final treeRootNode = trainer.train(trainData.toMatrix(dtype)); - return dependencies + return decisionTreeInjector .get() .create(treeRootNode, targetName, dtype); } diff --git a/lib/src/classifier/decision_tree_classifier/_init_module.dart b/lib/src/classifier/decision_tree_classifier/_init_module.dart new file mode 100644 index 00000000..61dff758 --- /dev/null +++ b/lib/src/classifier/decision_tree_classifier/_init_module.dart @@ -0,0 +1,75 @@ +import 'package:ml_algo/src/classifier/decision_tree_classifier/_injector.dart'; +import 'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_classifier_factory.dart'; +import 
'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_classifier_factory_impl.dart'; +import 'package:ml_algo/src/common/sequence_elements_distribution_calculator/distribution_calculator_factory.dart'; +import 'package:ml_algo/src/common/sequence_elements_distribution_calculator/distribution_calculator_factory_impl.dart'; +import 'package:ml_algo/src/di/common/init_common_module.dart'; +import 'package:ml_algo/src/di/injector.dart'; +import 'package:ml_algo/src/tree_trainer/leaf_detector/leaf_detector_factory.dart'; +import 'package:ml_algo/src/tree_trainer/leaf_detector/leaf_detector_factory_impl.dart'; +import 'package:ml_algo/src/tree_trainer/leaf_label/leaf_label_factory_factory.dart'; +import 'package:ml_algo/src/tree_trainer/leaf_label/leaf_label_factory_factory_impl.dart'; +import 'package:ml_algo/src/tree_trainer/split_assessor/split_assessor_factory.dart'; +import 'package:ml_algo/src/tree_trainer/split_assessor/split_assessor_factory_impl.dart'; +import 'package:ml_algo/src/tree_trainer/split_selector/split_selector_factory.dart'; +import 'package:ml_algo/src/tree_trainer/split_selector/split_selector_factory_impl.dart'; +import 'package:ml_algo/src/tree_trainer/splitter/nominal_splitter/nominal_splitter_factory.dart'; +import 'package:ml_algo/src/tree_trainer/splitter/nominal_splitter/nominal_splitter_factory_impl.dart'; +import 'package:ml_algo/src/tree_trainer/splitter/numerical_splitter/numerical_splitter_factory.dart'; +import 'package:ml_algo/src/tree_trainer/splitter/numerical_splitter/numerical_splitter_factory_impl.dart'; +import 'package:ml_algo/src/tree_trainer/splitter/splitter_factory.dart'; +import 'package:ml_algo/src/tree_trainer/splitter/splitter_factory_impl.dart'; +import 'package:ml_algo/src/tree_trainer/tree_trainer_factory.dart'; +import 'package:ml_algo/src/tree_trainer/tree_trainer_factory_impl.dart'; + +void initDecisionTreeModule() { + initCommonModule(); + + decisionTreeInjector + ..clearAll() + ..registerSingleton( + () => const SequenceElementsDistributionCalculatorFactoryImpl()) + + ..registerSingleton( + () => const NominalTreeSplitterFactoryImpl()) + + ..registerSingleton( + () => const NumericalTreeSplitterFactoryImpl()) + + ..registerSingleton( + () => const TreeSplitAssessorFactoryImpl()) + + ..registerSingleton( + () => TreeSplitterFactoryImpl( + decisionTreeInjector.get(), + decisionTreeInjector.get(), + decisionTreeInjector.get(), + )) + + ..registerSingleton( + () => TreeSplitSelectorFactoryImpl( + decisionTreeInjector.get(), + decisionTreeInjector.get(), + )) + + ..registerSingleton( + () => TreeLeafDetectorFactoryImpl( + decisionTreeInjector.get(), + )) + + ..registerSingleton( + () => TreeLeafLabelFactoryFactoryImpl( + decisionTreeInjector + .get(), + )) + + ..registerSingleton( + () => TreeTrainerFactoryImpl( + decisionTreeInjector.get(), + decisionTreeInjector.get(), + decisionTreeInjector.get(), + )) + + ..registerSingleton( + () => const DecisionTreeClassifierFactoryImpl()); +} diff --git a/lib/src/classifier/decision_tree_classifier/_injector.dart b/lib/src/classifier/decision_tree_classifier/_injector.dart new file mode 100644 index 00000000..bdd3af3c --- /dev/null +++ b/lib/src/classifier/decision_tree_classifier/_injector.dart @@ -0,0 +1,3 @@ +import 'package:injector/injector.dart'; + +final decisionTreeInjector = Injector(); diff --git a/lib/src/classifier/decision_tree_classifier/decision_tree_classifier.dart b/lib/src/classifier/decision_tree_classifier/decision_tree_classifier.dart index 14b4eb86..cbc0eecc 
100644 --- a/lib/src/classifier/decision_tree_classifier/decision_tree_classifier.dart +++ b/lib/src/classifier/decision_tree_classifier/decision_tree_classifier.dart @@ -1,6 +1,7 @@ import 'package:ml_algo/src/classifier/classifier.dart'; import 'package:ml_algo/src/classifier/decision_tree_classifier/_helper/create_decision_tree_classifier.dart'; import 'package:ml_algo/src/classifier/decision_tree_classifier/_helper/create_decision_tree_classifier_from_json.dart'; +import 'package:ml_algo/src/classifier/decision_tree_classifier/_init_module.dart'; import 'package:ml_algo/src/common/serializable/serializable.dart'; import 'package:ml_algo/src/model_selection/assessable.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; @@ -44,14 +45,18 @@ abstract class DecisionTreeClassifier implements int minSamplesCount, int maxDepth, DType dtype = DType.float32, - }) => createDecisionTreeClassifier( - trainData, - targetName, - minError, - minSamplesCount, - maxDepth, - dtype, - ); + }) { + initDecisionTreeModule(); + + return createDecisionTreeClassifier( + trainData, + targetName, + minError, + minSamplesCount, + maxDepth, + dtype, + ); + } /// Restores previously fitted classifier instance from the given [json] /// diff --git a/lib/src/classifier/decision_tree_classifier/decision_tree_classifier_impl.dart b/lib/src/classifier/decision_tree_classifier/decision_tree_classifier_impl.dart index 7bfb4e4f..dd642db0 100644 --- a/lib/src/classifier/decision_tree_classifier/decision_tree_classifier_impl.dart +++ b/lib/src/classifier/decision_tree_classifier/decision_tree_classifier_impl.dart @@ -1,8 +1,8 @@ import 'package:json_annotation/json_annotation.dart'; +import 'package:ml_algo/src/classifier/_mixins/assessable_classifier_mixin.dart'; import 'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_classifier.dart'; import 'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_json_keys.dart'; import 'package:ml_algo/src/common/serializable/serializable_mixin.dart'; -import 'package:ml_algo/src/predictor/assessable_predictor_mixin.dart'; import 'package:ml_algo/src/tree_trainer/leaf_label/leaf_label.dart'; import 'package:ml_algo/src/tree_trainer/tree_node/_helper/from_tree_node_json.dart'; import 'package:ml_algo/src/tree_trainer/tree_node/_helper/tree_node_to_json.dart'; @@ -19,7 +19,7 @@ part 'decision_tree_classifier_impl.g.dart'; @JsonSerializable() class DecisionTreeClassifierImpl with - AssessablePredictorMixin, + AssessableClassifierMixin, SerializableMixin implements DecisionTreeClassifier { @@ -47,6 +47,9 @@ class DecisionTreeClassifierImpl @JsonKey(name: targetColumnNameJsonKey) final String targetColumnName; + @override + Iterable get targetNames => [targetColumnName]; + @JsonKey( name: treeRootNodeJsonKey, toJson: treeNodeToJson, @@ -54,6 +57,14 @@ class DecisionTreeClassifierImpl ) final TreeNode treeRootNode; + @override + @JsonKey(includeIfNull: false) + final num positiveLabel = null; + + @override + @JsonKey(includeIfNull: false) + final num negativeLabel = null; + @override DataFrame predict(DataFrame features) { final predictedLabels = features @@ -72,9 +83,7 @@ class DecisionTreeClassifierImpl return DataFrame.fromMatrix( Matrix.fromColumns([outcomeVector], dtype: dtype), - header: [ - targetColumnName, - ], + header: targetNames, ); } @@ -83,26 +92,21 @@ class DecisionTreeClassifierImpl final sampleVectors = features .toMatrix(dtype) .rows; - final probabilities = sampleVectors .map((sample) => _getLabelForSample(sample, treeRootNode)) 
.map((label) => label.probability) .toList(growable: false); - final probabilitiesVector = Vector.fromList( probabilities, dtype: dtype, ); - final probabilitiesMatrixColumn = Matrix.fromColumns([ probabilitiesVector, ], dtype: dtype); return DataFrame.fromMatrix( probabilitiesMatrixColumn, - header: [ - targetColumnName, - ], + header: targetNames, ); } diff --git a/lib/src/classifier/knn_classifier/_helpers/create_knn_classifier.dart b/lib/src/classifier/knn_classifier/_helpers/create_knn_classifier.dart index 6b674905..78d5b033 100644 --- a/lib/src/classifier/knn_classifier/_helpers/create_knn_classifier.dart +++ b/lib/src/classifier/knn_classifier/_helpers/create_knn_classifier.dart @@ -1,6 +1,6 @@ +import 'package:ml_algo/src/classifier/knn_classifier/_injector.dart'; import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier.dart'; import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier_factory.dart'; -import 'package:ml_algo/src/di/dependencies.dart'; import 'package:ml_algo/src/helpers/features_target_split.dart'; import 'package:ml_algo/src/helpers/validate_train_data.dart'; import 'package:ml_algo/src/knn_kernel/kernel_factory.dart'; @@ -23,13 +23,10 @@ KnnClassifier createKnnClassifier( final splits = featuresTargetSplit(trainData, targetNames: [targetName], ).toList(); - final featuresSplit = splits[0]; final targetSplit = splits[1]; - final trainFeatures = featuresSplit.toMatrix(dtype); final trainLabels = targetSplit.toMatrix(dtype); - final classLabels = targetSplit[targetName].isDiscrete ? targetSplit[targetName] .discreteValues @@ -40,12 +37,12 @@ KnnClassifier createKnnClassifier( .getColumn(0) .unique() .toList(growable: false); - - final kernelFactory = dependencies.get(); - final kernel = kernelFactory.createByType(kernelType); - - final solverFactory = dependencies.get(); - + final kernelFactory = knnClassifierInjector + .get(); + final kernel = kernelFactory + .createByType(kernelType); + final solverFactory = knnClassifierInjector + .get(); final solver = solverFactory.create( trainFeatures, trainLabels, @@ -53,8 +50,7 @@ KnnClassifier createKnnClassifier( distance, true, ); - - final knnClassifierFactory = dependencies + final knnClassifierFactory = knnClassifierInjector .get(); return knnClassifierFactory.create( diff --git a/lib/src/classifier/knn_classifier/_init_module.dart b/lib/src/classifier/knn_classifier/_init_module.dart new file mode 100644 index 00000000..268d49e5 --- /dev/null +++ b/lib/src/classifier/knn_classifier/_init_module.dart @@ -0,0 +1,31 @@ +import 'package:ml_algo/src/classifier/knn_classifier/_injector.dart'; +import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier_factory.dart'; +import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier_factory_impl.dart'; +import 'package:ml_algo/src/di/common/init_common_module.dart'; +import 'package:ml_algo/src/knn_kernel/kernel_factory.dart'; +import 'package:ml_algo/src/knn_kernel/kernel_factory_impl.dart'; +import 'package:ml_algo/src/knn_solver/knn_solver_factory.dart'; +import 'package:ml_algo/src/knn_solver/knn_solver_factory_impl.dart'; +import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_factory.dart'; +import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_factory_impl.dart'; + +void initKnnClassifierModule() { + initCommonModule(); + + knnClassifierInjector + ..clearAll() + ..registerSingleton( + () => const KernelFactoryImpl()) + + ..registerDependency( + () => const KnnSolverFactoryImpl()) + + ..registerSingleton( + () 
=> const KnnClassifierFactoryImpl()) + + ..registerSingleton( + () => KnnRegressorFactoryImpl( + knnClassifierInjector.get(), + knnClassifierInjector.get(), + )); +} diff --git a/lib/src/classifier/knn_classifier/_injector.dart b/lib/src/classifier/knn_classifier/_injector.dart new file mode 100644 index 00000000..441ba896 --- /dev/null +++ b/lib/src/classifier/knn_classifier/_injector.dart @@ -0,0 +1,3 @@ +import 'package:injector/injector.dart'; + +final knnClassifierInjector = Injector(); diff --git a/lib/src/classifier/knn_classifier/knn_classifier.dart b/lib/src/classifier/knn_classifier/knn_classifier.dart index ad88bf17..48ea90c4 100644 --- a/lib/src/classifier/knn_classifier/knn_classifier.dart +++ b/lib/src/classifier/knn_classifier/knn_classifier.dart @@ -1,5 +1,6 @@ import 'package:ml_algo/src/classifier/classifier.dart'; import 'package:ml_algo/src/classifier/knn_classifier/_helpers/create_knn_classifier.dart'; +import 'package:ml_algo/src/classifier/knn_classifier/_init_module.dart'; import 'package:ml_algo/src/knn_kernel/kernel_type.dart'; import 'package:ml_algo/src/model_selection/assessable.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; @@ -46,5 +47,16 @@ abstract class KnnClassifier implements Assessable, Classifier { Distance distance = Distance.euclidean, DType dtype = DType.float32, } - ) => createKnnClassifier(trainData, targetName, k, kernel, distance, dtype); + ) { + initKnnClassifierModule(); + + return createKnnClassifier( + trainData, + targetName, + k, + kernel, + distance, + dtype, + ); + } } diff --git a/lib/src/classifier/knn_classifier/knn_classifier_impl.dart b/lib/src/classifier/knn_classifier/knn_classifier_impl.dart index 9b440262..65a79fed 100644 --- a/lib/src/classifier/knn_classifier/knn_classifier_impl.dart +++ b/lib/src/classifier/knn_classifier/knn_classifier_impl.dart @@ -1,16 +1,20 @@ +import 'package:ml_algo/src/classifier/_mixins/assessable_classifier_mixin.dart'; import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier.dart'; import 'package:ml_algo/src/helpers/validate_class_label_list.dart'; import 'package:ml_algo/src/helpers/validate_test_features.dart'; import 'package:ml_algo/src/knn_kernel/kernel.dart'; import 'package:ml_algo/src/knn_solver/knn_solver.dart'; import 'package:ml_algo/src/knn_solver/neigbour.dart'; -import 'package:ml_algo/src/predictor/assessable_predictor_mixin.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/matrix.dart'; import 'package:ml_linalg/vector.dart'; -class KnnClassifierImpl with AssessablePredictorMixin implements KnnClassifier { +class KnnClassifierImpl + with + AssessableClassifierMixin + implements + KnnClassifier { KnnClassifierImpl( this._targetColumnName, this._classLabels, @@ -26,11 +30,20 @@ class KnnClassifierImpl with AssessablePredictorMixin implements KnnClassifier { @override final DType dtype; + @override + Iterable get targetNames => [_targetColumnName]; + final List _classLabels; final Kernel _kernel; final KnnSolver _solver; final String _columnPrefix = 'Class label'; + @override + final num positiveLabel = null; + + @override + final num negativeLabel = null; + @override DataFrame predict(DataFrame features) { validateTestFeatures(features, dtype); @@ -39,14 +52,30 @@ class KnnClassifierImpl with AssessablePredictorMixin implements KnnClassifier { final labels = labelsToProbabilities.keys.toList(); final predictedOutcomes = _getProbabilityMatrix(labelsToProbabilities) .rows - .map((row) => 
labels[row.toList().indexOf(row.max())]) + .map((probabilities) { + // TODO: extract max element index search logic to ml_linalg + // TODO: fix corner cases with NaN and Infinity + final maxProbability = probabilities.max(); + final maxProbabilityIndex = probabilities + .toList() + .indexOf(maxProbability); + + if (maxProbabilityIndex == -1) { + print('KnnClassifier error: cannot find max probability, ' + 'max probability is $maxProbability'); + + return labels.first; + } + + return labels[maxProbabilityIndex]; + }) .toList(); final outcomesAsVector = Vector.fromList(predictedOutcomes, dtype: dtype); return DataFrame.fromMatrix( Matrix.fromColumns([outcomesAsVector], dtype: dtype), - header: [_targetColumnName], + header: targetNames, ); } @@ -83,7 +112,7 @@ class KnnClassifierImpl with AssessablePredictorMixin implements KnnClassifier { /// ``` /// /// where each row is a classes probability distribution for the appropriate - /// feature record from test feature matrix + /// feature record from the test feature matrix Map> _getLabelToProbabilityMapping(DataFrame features) { final kNeighbourGroups = _solver.findKNeighbours(features.toMatrix(dtype)); final classLabelsAsSet = Set.from(_classLabels); diff --git a/lib/src/classifier/logistic_regressor/_helpers/create_logistic_regressor.dart b/lib/src/classifier/logistic_regressor/_helpers/create_logistic_regressor.dart index e4ed7c30..addc77a1 100644 --- a/lib/src/classifier/logistic_regressor/_helpers/create_logistic_regressor.dart +++ b/lib/src/classifier/logistic_regressor/_helpers/create_logistic_regressor.dart @@ -1,7 +1,7 @@ import 'package:ml_algo/src/classifier/_helpers/create_log_likelihood_optimizer.dart'; +import 'package:ml_algo/src/classifier/logistic_regressor/_injector.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor_impl.dart'; -import 'package:ml_algo/src/di/dependencies.dart'; import 'package:ml_algo/src/helpers/validate_class_labels.dart'; import 'package:ml_algo/src/helpers/validate_initial_coefficients.dart'; import 'package:ml_algo/src/helpers/validate_train_data.dart'; @@ -16,29 +16,30 @@ import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/matrix.dart'; import 'package:ml_linalg/vector.dart'; -LogisticRegressor createLogisticRegressor( - DataFrame trainData, - String targetName, - LinearOptimizerType optimizerType, - int iterationsLimit, - double initialLearningRate, - double minCoefficientsUpdate, - double probabilityThreshold, - double lambda, - RegularizationType regularizationType, - int randomSeed, - int batchSize, - bool fitIntercept, - double interceptScale, - bool isFittingDataNormalized, - LearningRateType learningRateType, - InitialCoefficientsType initialCoefficientsType, - Vector initialCoefficients, - num positiveLabel, - num negativeLabel, - bool collectLearningData, - DType dtype, -) { +LogisticRegressor createLogisticRegressor({ + DataFrame trainData, + String targetName, + LinearOptimizerType optimizerType = LinearOptimizerType.gradient, + int iterationsLimit = 100, + double initialLearningRate = 1e-3, + double minCoefficientsUpdate = 1e-12, + double probabilityThreshold = 0.5, + double lambda = 0.0, + RegularizationType regularizationType, + int randomSeed, + int batchSize = 1, + bool fitIntercept = false, + double interceptScale = 1.0, + bool isFittingDataNormalized = false, + LearningRateType learningRateType = LearningRateType.constant, + InitialCoefficientsType 
initialCoefficientsType = + InitialCoefficientsType.zeroes, + Vector initialCoefficients, + num positiveLabel = 1, + num negativeLabel = 0, + bool collectLearningData = false, + DType dtype = DType.float32, +}) { validateTrainData(trainData, [targetName]); validateClassLabels(positiveLabel, negativeLabel); @@ -47,7 +48,7 @@ LogisticRegressor createLogisticRegressor( trainData.toMatrix(dtype).columnsNum - 1); } - final linkFunction = dependencies.get( + final linkFunction = logisticRegressorInjector.get( dependencyName: dTypeToInverseLogitLinkFunctionToken[dtype]); final optimizer = createLogLikelihoodOptimizer( trainData, diff --git a/lib/src/classifier/logistic_regressor/_init_module.dart b/lib/src/classifier/logistic_regressor/_init_module.dart new file mode 100644 index 00000000..ea43a6ca --- /dev/null +++ b/lib/src/classifier/logistic_regressor/_init_module.dart @@ -0,0 +1,20 @@ +import 'package:ml_algo/src/classifier/logistic_regressor/_injector.dart'; +import 'package:ml_algo/src/di/common/init_common_module.dart'; +import 'package:ml_algo/src/link_function/link_function.dart'; +import 'package:ml_algo/src/link_function/link_function_dependency_tokens.dart'; +import 'package:ml_algo/src/link_function/logit/float32_inverse_logit_function.dart'; +import 'package:ml_algo/src/link_function/logit/float64_inverse_logit_function.dart'; + +void initLogisticRegressorModule() { + initCommonModule(); + + logisticRegressorInjector + ..clearAll() + ..registerSingleton( + () => const Float32InverseLogitLinkFunction(), + dependencyName: float32InverseLogitLinkFunctionToken) + + ..registerSingleton( + () => const Float64InverseLogitLinkFunction(), + dependencyName: float64InverseLogitLinkFunctionToken); +} diff --git a/lib/src/classifier/logistic_regressor/_injector.dart b/lib/src/classifier/logistic_regressor/_injector.dart new file mode 100644 index 00000000..7ad35abb --- /dev/null +++ b/lib/src/classifier/logistic_regressor/_injector.dart @@ -0,0 +1,3 @@ +import 'package:injector/injector.dart'; + +final logisticRegressorInjector = Injector(); diff --git a/lib/src/classifier/logistic_regressor/logistic_regressor.dart b/lib/src/classifier/logistic_regressor/logistic_regressor.dart index f3ccbeb1..e09b98a3 100644 --- a/lib/src/classifier/logistic_regressor/logistic_regressor.dart +++ b/lib/src/classifier/logistic_regressor/logistic_regressor.dart @@ -1,6 +1,7 @@ import 'package:ml_algo/src/classifier/linear_classifier.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/_helpers/create_logistic_regressor.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/_helpers/create_logistic_regressor_from_json.dart'; +import 'package:ml_algo/src/classifier/logistic_regressor/_init_module.dart'; import 'package:ml_algo/src/common/serializable/serializable.dart'; import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_type.dart'; @@ -106,10 +107,10 @@ abstract class LogisticRegressor implements /// should contain an extra element in the beginning of the vector and it /// denotes the intercept term coefficient /// - /// [positiveLabel] Defines the value, that will be used for `positive` class. + /// [positiveLabel] A value that will be used for the positive class. /// By default, `1`. /// - /// [negativeLabel] Defines the value, that will be used for `negative` class. 
+ /// [negativeLabel] A value that will be used for the negative class. /// By default, `0`. /// /// [collectLearningData] Whether or not to collect learning data, for @@ -143,29 +144,33 @@ abstract class LogisticRegressor implements num negativeLabel = 0, bool collectLearningData = false, DType dtype = DType.float32, - }) => createLogisticRegressor( - trainData, - targetName, - optimizerType, - iterationsLimit, - initialLearningRate, - minCoefficientsUpdate, - probabilityThreshold, - lambda, - regularizationType, - randomSeed, - batchSize, - fitIntercept, - interceptScale, - isFittingDataNormalized, - learningRateType, - initialCoefficientsType, - initialCoefficients ?? Vector.empty(dtype: dtype), - positiveLabel, - negativeLabel, - collectLearningData, - dtype, - ); + }) { + initLogisticRegressorModule(); + + return createLogisticRegressor( + trainData: trainData, + targetName: targetName, + optimizerType: optimizerType, + iterationsLimit: iterationsLimit, + initialLearningRate: initialLearningRate, + minCoefficientsUpdate: minCoefficientsUpdate, + probabilityThreshold: probabilityThreshold, + lambda: lambda, + regularizationType: regularizationType, + randomSeed: randomSeed, + batchSize: batchSize, + fitIntercept: fitIntercept, + interceptScale: interceptScale, + isFittingDataNormalized: isFittingDataNormalized, + learningRateType: learningRateType, + initialCoefficientsType: initialCoefficientsType, + initialCoefficients: initialCoefficients ?? Vector.empty(dtype: dtype), + positiveLabel: positiveLabel, + negativeLabel: negativeLabel, + collectLearningData: collectLearningData, + dtype: dtype, + ); + } /// Restores previously fitted classifier instance from the [json] /// diff --git a/lib/src/classifier/logistic_regressor/logistic_regressor_impl.dart b/lib/src/classifier/logistic_regressor/logistic_regressor_impl.dart index f73a7e60..64b07d41 100644 --- a/lib/src/classifier/logistic_regressor/logistic_regressor_impl.dart +++ b/lib/src/classifier/logistic_regressor/logistic_regressor_impl.dart @@ -1,4 +1,6 @@ import 'package:json_annotation/json_annotation.dart'; +import 'package:ml_algo/src/classifier/_mixins/assessable_classifier_mixin.dart'; +import 'package:ml_algo/src/classifier/_mixins/classification_metrics_mixin.dart'; import 'package:ml_algo/src/classifier/_mixins/linear_classifier_mixin.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor_json_keys.dart'; @@ -9,7 +11,6 @@ import 'package:ml_algo/src/helpers/validate_probability_threshold.dart'; import 'package:ml_algo/src/link_function/helpers/from_link_function_json.dart'; import 'package:ml_algo/src/link_function/helpers/link_function_to_json.dart'; import 'package:ml_algo/src/link_function/link_function.dart'; -import 'package:ml_algo/src/predictor/assessable_predictor_mixin.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/dtype_to_json.dart'; @@ -24,8 +25,9 @@ part 'logistic_regressor_impl.g.dart'; class LogisticRegressorImpl with LinearClassifierMixin, - AssessablePredictorMixin, - SerializableMixin + AssessableClassifierMixin, + SerializableMixin, + ClassificationMetricsMixin implements LogisticRegressor { @@ -72,6 +74,7 @@ class LogisticRegressorImpl final Matrix coefficientsByClasses; @override + @deprecated @JsonKey(name: logisticRegressorClassNamesJsonKey) final Iterable classNames; @@ -94,9 +97,11 @@ class LogisticRegressorImpl 
@JsonKey(name: logisticRegressorProbabilityThresholdJsonKey) final num probabilityThreshold; + @override @JsonKey(name: logisticRegressorPositiveLabelJsonKey) final num positiveLabel; + @override @JsonKey(name: logisticRegressorNegativeLabelJsonKey) final num negativeLabel; @@ -115,6 +120,9 @@ class LogisticRegressorImpl ) final List costPerIteration; + @override + Iterable get targetNames => classNames; + @override DataFrame predict(DataFrame testFeatures) { final predictedLabels = getProbabilitiesMatrix(testFeatures) @@ -128,7 +136,7 @@ class LogisticRegressorImpl return DataFrame.fromMatrix( predictedLabels, - header: classNames, + header: targetNames, ); } } diff --git a/lib/src/classifier/softmax_regressor/_helpers/create_softmax_regressor.dart b/lib/src/classifier/softmax_regressor/_helpers/create_softmax_regressor.dart index c7f887e3..d3292e89 100644 --- a/lib/src/classifier/softmax_regressor/_helpers/create_softmax_regressor.dart +++ b/lib/src/classifier/softmax_regressor/_helpers/create_softmax_regressor.dart @@ -1,7 +1,7 @@ import 'package:ml_algo/src/classifier/_helpers/create_log_likelihood_optimizer.dart'; +import 'package:ml_algo/src/classifier/softmax_regressor/_injector.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor_factory.dart'; -import 'package:ml_algo/src/di/dependencies.dart'; import 'package:ml_algo/src/helpers/validate_train_data.dart'; import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_type.dart'; @@ -13,28 +13,29 @@ import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/matrix.dart'; -SoftmaxRegressor createSoftmaxRegressor( +SoftmaxRegressor createSoftmaxRegressor({ DataFrame trainData, List targetNames, - LinearOptimizerType optimizerType, - int iterationsLimit, - double initialLearningRate, - double minCoefficientsUpdate, + LinearOptimizerType optimizerType = LinearOptimizerType.gradient, + int iterationsLimit = 100, + double initialLearningRate = 1e-3, + double minCoefficientsUpdate = 1e-12, double lambda, RegularizationType regularizationType, int randomSeed, - int batchSize, - bool fitIntercept, - double interceptScale, - LearningRateType learningRateType, + int batchSize = 1, + bool fitIntercept = false, + double interceptScale = 1.0, + LearningRateType learningRateType = LearningRateType.constant, bool isFittingDataNormalized, - InitialCoefficientsType initialCoefficientsType, + InitialCoefficientsType initialCoefficientsType = + InitialCoefficientsType.zeroes, Matrix initialCoefficients, - num positiveLabel, - num negativeLabel, - bool collectLearningData, - DType dtype, -) { + num positiveLabel = 1, + num negativeLabel = 0, + bool collectLearningData = false, + DType dtype = DType.float32, + }) { if (targetNames.isNotEmpty && targetNames.length < 2) { throw Exception('The target column should be encoded properly ' '(e.g., via one-hot encoder)'); @@ -42,8 +43,9 @@ SoftmaxRegressor createSoftmaxRegressor( validateTrainData(trainData, targetNames); - final linkFunction = dependencies.get( - dependencyName: dTypeToSoftmaxLinkFunctionToken[dtype]); + final linkFunction = softmaxRegressorInjector + .get( + dependencyName: dTypeToSoftmaxLinkFunctionToken[dtype]); final optimizer = createLogLikelihoodOptimizer( trainData, @@ -66,17 
+68,18 @@ SoftmaxRegressor createSoftmaxRegressor( negativeLabel: negativeLabel, dtype: dtype, ); - final coefficientsByClasses = optimizer.findExtrema( initialCoefficients: initialCoefficients, isMinimizingObjective: false, collectLearningData: collectLearningData, ); - final costPerIteration = optimizer.costPerIteration.isNotEmpty - ? optimizer.costPerIteration - : null; + final costPerIteration = optimizer + .costPerIteration + .isNotEmpty + ? optimizer.costPerIteration + : null; - final regressorFactory = dependencies + final regressorFactory = softmaxRegressorInjector .get(); return regressorFactory.create( diff --git a/lib/src/classifier/softmax_regressor/_init_module.dart b/lib/src/classifier/softmax_regressor/_init_module.dart new file mode 100644 index 00000000..1c3df411 --- /dev/null +++ b/lib/src/classifier/softmax_regressor/_init_module.dart @@ -0,0 +1,25 @@ +import 'package:ml_algo/src/classifier/softmax_regressor/_injector.dart'; +import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor_factory.dart'; +import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor_factory_impl.dart'; +import 'package:ml_algo/src/di/common/init_common_module.dart'; +import 'package:ml_algo/src/link_function/link_function.dart'; +import 'package:ml_algo/src/link_function/link_function_dependency_tokens.dart'; +import 'package:ml_algo/src/link_function/softmax/float32_softmax_link_function.dart'; +import 'package:ml_algo/src/link_function/softmax/float64_softmax_link_function.dart'; + +void initSoftmaxRegressorModule() { + initCommonModule(); + + softmaxRegressorInjector + ..clearAll() + ..registerSingleton( + () => const Float32SoftmaxLinkFunction(), + dependencyName: float32SoftmaxLinkFunctionToken) + + ..registerSingleton( + () => const Float64SoftmaxLinkFunction(), + dependencyName: float64SoftmaxLinkFunctionToken) + + ..registerSingleton( + () => const SoftmaxRegressorFactoryImpl()); +} diff --git a/lib/src/classifier/softmax_regressor/_injector.dart b/lib/src/classifier/softmax_regressor/_injector.dart new file mode 100644 index 00000000..f27619e3 --- /dev/null +++ b/lib/src/classifier/softmax_regressor/_injector.dart @@ -0,0 +1,3 @@ +import 'package:injector/injector.dart'; + +final softmaxRegressorInjector = Injector(); diff --git a/lib/src/classifier/softmax_regressor/softmax_regressor.dart b/lib/src/classifier/softmax_regressor/softmax_regressor.dart index 7ee56413..738f6955 100644 --- a/lib/src/classifier/softmax_regressor/softmax_regressor.dart +++ b/lib/src/classifier/softmax_regressor/softmax_regressor.dart @@ -1,6 +1,7 @@ import 'package:ml_algo/src/classifier/linear_classifier.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/_helpers/create_softmax_regressor.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/_helpers/create_softmax_regressor_from_json.dart'; +import 'package:ml_algo/src/classifier/softmax_regressor/_init_module.dart'; import 'package:ml_algo/src/common/serializable/serializable.dart'; import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_type.dart'; @@ -99,10 +100,10 @@ abstract class SoftmaxRegressor implements /// [initialCoefficients] matrix is a vector of coefficients of a certain /// class. /// - /// [positiveLabel] Defines the value that will be used for `positive` class. + /// [positiveLabel] A value that will be used for the positive class. 
/// By default, `1`. /// - /// [negativeLabel] Defines the value that will be used for `negative` class. + /// [negativeLabel] A value that will be used for the negative class. /// By default, `0`. /// /// [collectLearningData] Whether or not to collect learning data, for @@ -137,28 +138,32 @@ abstract class SoftmaxRegressor implements bool collectLearningData = false, DType dtype = DType.float32, } - ) => createSoftmaxRegressor( - trainData, - targetNames, - optimizerType, - iterationsLimit, - initialLearningRate, - minCoefficientsUpdate, - lambda, - regularizationType, - randomSeed, - batchSize, - fitIntercept, - interceptScale, - learningRateType, - isFittingDataNormalized, - initialCoefficientsType, - initialCoefficients, - positiveLabel, - negativeLabel, - collectLearningData, - dtype, - ); + ) { + initSoftmaxRegressorModule(); + + return createSoftmaxRegressor( + trainData: trainData, + targetNames: targetNames, + optimizerType: optimizerType, + iterationsLimit: iterationsLimit, + initialLearningRate: initialLearningRate, + minCoefficientsUpdate: minCoefficientsUpdate, + lambda: lambda, + regularizationType: regularizationType, + randomSeed: randomSeed, + batchSize: batchSize, + fitIntercept: fitIntercept, + interceptScale: interceptScale, + learningRateType: learningRateType, + isFittingDataNormalized: isFittingDataNormalized, + initialCoefficientsType: initialCoefficientsType, + initialCoefficients: initialCoefficients, + positiveLabel: positiveLabel, + negativeLabel: negativeLabel, + collectLearningData: collectLearningData, + dtype: dtype, + ); + } /// Restores previously fitted classifier instance from the [json] /// diff --git a/lib/src/classifier/softmax_regressor/softmax_regressor_impl.dart b/lib/src/classifier/softmax_regressor/softmax_regressor_impl.dart index d97642c1..30e40036 100644 --- a/lib/src/classifier/softmax_regressor/softmax_regressor_impl.dart +++ b/lib/src/classifier/softmax_regressor/softmax_regressor_impl.dart @@ -1,4 +1,6 @@ import 'package:json_annotation/json_annotation.dart'; +import 'package:ml_algo/src/classifier/_mixins/assessable_classifier_mixin.dart'; +import 'package:ml_algo/src/classifier/_mixins/classification_metrics_mixin.dart'; import 'package:ml_algo/src/classifier/_mixins/linear_classifier_mixin.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor_json_keys.dart'; @@ -8,7 +10,6 @@ import 'package:ml_algo/src/helpers/validate_coefficients_matrix.dart'; import 'package:ml_algo/src/link_function/helpers/from_link_function_json.dart'; import 'package:ml_algo/src/link_function/helpers/link_function_to_json.dart'; import 'package:ml_algo/src/link_function/link_function.dart'; -import 'package:ml_algo/src/predictor/assessable_predictor_mixin.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/linalg.dart'; @@ -20,8 +21,9 @@ part 'softmax_regressor_impl.g.dart'; class SoftmaxRegressorImpl with LinearClassifierMixin, - AssessablePredictorMixin, - SerializableMixin + AssessableClassifierMixin, + SerializableMixin, + ClassificationMetricsMixin implements SoftmaxRegressor { @@ -54,6 +56,7 @@ class SoftmaxRegressorImpl Map toJson() => _$SoftmaxRegressorImplToJson(this); @override + @deprecated @JsonKey(name: softmaxRegressorClassNamesJsonKey) final Iterable classNames; @@ -89,9 +92,11 @@ class SoftmaxRegressorImpl ) final LinkFunction linkFunction; + @override @JsonKey(name: 
softmaxRegressorPositiveLabelJsonKey) final num positiveLabel; + @override @JsonKey(name: softmaxRegressorNegativeLabelJsonKey) final num negativeLabel; @@ -102,6 +107,9 @@ class SoftmaxRegressorImpl ) final List costPerIteration; + @override + Iterable get targetNames => classNames; + @override DataFrame predict(DataFrame testFeatures) { final allProbabilities = getProbabilitiesMatrix(testFeatures); @@ -121,7 +129,7 @@ class SoftmaxRegressorImpl return DataFrame.fromMatrix( labels, - header: classNames, + header: targetNames, ); } } diff --git a/lib/src/common/exception/invalid_metric_type_exception.dart b/lib/src/common/exception/invalid_metric_type_exception.dart new file mode 100644 index 00000000..457aceb1 --- /dev/null +++ b/lib/src/common/exception/invalid_metric_type_exception.dart @@ -0,0 +1,13 @@ +import 'package:ml_algo/src/metric/metric_type.dart'; + +class InvalidMetricTypeException implements Exception { + InvalidMetricTypeException(MetricType metricType, + List allowedTypes) : + message = 'Inappropriate metric provided, allowed metrics: ' + '$allowedTypes, $metricType given'; + + final String message; + + @override + String toString() => message; +} diff --git a/lib/src/common/exception/matrix_column_exception.dart b/lib/src/common/exception/matrix_column_exception.dart index b95d27a5..6e6706f0 100644 --- a/lib/src/common/exception/matrix_column_exception.dart +++ b/lib/src/common/exception/matrix_column_exception.dart @@ -1,7 +1,10 @@ class MatrixColumnException implements Exception { MatrixColumnException(int rowsCount, int columnsCount) : - message = 'Expected a matrix column, but matrix of shape ' + message = 'Expected a matrix column, matrix of shape ' '($rowsCount, $columnsCount) given'; final String message; + + @override + String toString() => message; } diff --git a/lib/src/di/common/init_common_module.dart b/lib/src/di/common/init_common_module.dart new file mode 100644 index 00000000..6eac29da --- /dev/null +++ b/lib/src/di/common/init_common_module.dart @@ -0,0 +1,80 @@ +import 'package:ml_algo/src/classifier/classifier.dart'; +import 'package:ml_algo/src/cost_function/cost_function_factory.dart'; +import 'package:ml_algo/src/cost_function/cost_function_factory_impl.dart'; +import 'package:ml_algo/src/di/dependency_keys.dart'; +import 'package:ml_algo/src/di/injector.dart'; +import 'package:ml_algo/src/helpers/features_target_split.dart'; +import 'package:ml_algo/src/helpers/features_target_split_interface.dart'; +import 'package:ml_algo/src/helpers/normalize_class_labels.dart'; +import 'package:ml_algo/src/helpers/normalize_class_labels_interface.dart'; +import 'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector_factory.dart'; +import 'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector_factory_impl.dart'; +import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_generator_factory.dart'; +import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_generator_factory_impl.dart'; +import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_generator_factory.dart'; +import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_generator_factory_impl.dart'; +import 'package:ml_algo/src/linear_optimizer/linear_optimizer_factory.dart'; +import 'package:ml_algo/src/linear_optimizer/linear_optimizer_factory_impl.dart'; +import 
'package:ml_algo/src/math/randomizer/randomizer_factory.dart'; +import 'package:ml_algo/src/math/randomizer/randomizer_factory_impl.dart'; +import 'package:ml_algo/src/metric/metric_factory.dart'; +import 'package:ml_algo/src/metric/metric_factory_impl.dart'; +import 'package:ml_algo/src/model_selection/model_assessor/classifier_assessor.dart'; +import 'package:ml_algo/src/model_selection/model_assessor/model_assessor.dart'; +import 'package:ml_algo/src/model_selection/model_assessor/regressor_assessor.dart'; +import 'package:ml_algo/src/predictor/predictor.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; +import 'package:ml_preprocessing/ml_preprocessing.dart'; + +typedef EncoderFactory = Encoder Function(DataFrame, Iterable); + +void initCommonModule() { + injector + ..clearAll() + ..registerSingleton( + () => (DataFrame data, Iterable targetNames) => + Encoder.oneHot(data, featureNames: targetNames), + dependencyName: oneHotEncoderFactoryKey) + + ..registerSingleton( + () => const RandomizerFactoryImpl()) + + ..registerDependency( + () => featuresTargetSplit) + + ..registerSingleton( + () => const MetricFactoryImpl()) + + ..registerDependency( + () => normalizeClassLabels) + + ..registerSingleton( + () => const LinearOptimizerFactoryImpl()) + + ..registerSingleton( + () => const LearningRateGeneratorFactoryImpl()) + + ..registerSingleton( + () => const InitialCoefficientsGeneratorFactoryImpl()) + + ..registerDependency( + () => const ConvergenceDetectorFactoryImpl()) + + ..registerSingleton( + () => const CostFunctionFactoryImpl()) + + ..registerSingleton>(() => + ClassifierAssessor( + injector.get(), + injector.get( + dependencyName: oneHotEncoderFactoryKey), + featuresTargetSplit, + normalizeClassLabels, + )) + + ..registerSingleton>(() => + RegressorAssessor( + injector.get(), + featuresTargetSplit, + )); +} diff --git a/lib/src/di/dependencies.dart b/lib/src/di/dependencies.dart deleted file mode 100644 index e26745e1..00000000 --- a/lib/src/di/dependencies.dart +++ /dev/null @@ -1,154 +0,0 @@ -import 'package:injector/injector.dart'; -import 'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_classifier_factory.dart'; -import 'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_classifier_factory_impl.dart'; -import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier_factory.dart'; -import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier_factory_impl.dart'; -import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor_factory.dart'; -import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor_factory_impl.dart'; -import 'package:ml_algo/src/common/sequence_elements_distribution_calculator/distribution_calculator_factory.dart'; -import 'package:ml_algo/src/common/sequence_elements_distribution_calculator/distribution_calculator_factory_impl.dart'; -import 'package:ml_algo/src/cost_function/cost_function_factory.dart'; -import 'package:ml_algo/src/cost_function/cost_function_factory_impl.dart'; -import 'package:ml_algo/src/di/injector.dart'; -import 'package:ml_algo/src/knn_kernel/kernel_factory.dart'; -import 'package:ml_algo/src/knn_kernel/kernel_factory_impl.dart'; -import 'package:ml_algo/src/knn_solver/knn_solver_factory.dart'; -import 'package:ml_algo/src/knn_solver/knn_solver_factory_impl.dart'; -import 'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector_factory.dart'; -import 
'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector_factory_impl.dart'; -import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_generator_factory.dart'; -import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_generator_factory_impl.dart'; -import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_generator_factory.dart'; -import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_generator_factory_impl.dart'; -import 'package:ml_algo/src/linear_optimizer/linear_optimizer_factory.dart'; -import 'package:ml_algo/src/linear_optimizer/linear_optimizer_factory_impl.dart'; -import 'package:ml_algo/src/link_function/link_function.dart'; -import 'package:ml_algo/src/link_function/link_function_dependency_tokens.dart'; -import 'package:ml_algo/src/link_function/logit/float32_inverse_logit_function.dart'; -import 'package:ml_algo/src/link_function/logit/float64_inverse_logit_function.dart'; -import 'package:ml_algo/src/link_function/softmax/float32_softmax_link_function.dart'; -import 'package:ml_algo/src/link_function/softmax/float64_softmax_link_function.dart'; -import 'package:ml_algo/src/math/randomizer/randomizer_factory.dart'; -import 'package:ml_algo/src/math/randomizer/randomizer_factory_impl.dart'; -import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider_factory.dart'; -import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider_factory_impl.dart'; -import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_factory.dart'; -import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_factory_impl.dart'; -import 'package:ml_algo/src/tree_trainer/leaf_detector/leaf_detector_factory.dart'; -import 'package:ml_algo/src/tree_trainer/leaf_detector/leaf_detector_factory_impl.dart'; -import 'package:ml_algo/src/tree_trainer/leaf_label/leaf_label_factory_factory.dart'; -import 'package:ml_algo/src/tree_trainer/leaf_label/leaf_label_factory_factory_impl.dart'; -import 'package:ml_algo/src/tree_trainer/split_assessor/split_assessor_factory.dart'; -import 'package:ml_algo/src/tree_trainer/split_assessor/split_assessor_factory_impl.dart'; -import 'package:ml_algo/src/tree_trainer/split_selector/split_selector_factory.dart'; -import 'package:ml_algo/src/tree_trainer/split_selector/split_selector_factory_impl.dart'; -import 'package:ml_algo/src/tree_trainer/splitter/nominal_splitter/nominal_splitter_factory.dart'; -import 'package:ml_algo/src/tree_trainer/splitter/nominal_splitter/nominal_splitter_factory_impl.dart'; -import 'package:ml_algo/src/tree_trainer/splitter/numerical_splitter/numerical_splitter_factory.dart'; -import 'package:ml_algo/src/tree_trainer/splitter/numerical_splitter/numerical_splitter_factory_impl.dart'; -import 'package:ml_algo/src/tree_trainer/splitter/splitter_factory.dart'; -import 'package:ml_algo/src/tree_trainer/splitter/splitter_factory_impl.dart'; -import 'package:ml_algo/src/tree_trainer/tree_trainer_factory.dart'; -import 'package:ml_algo/src/tree_trainer/tree_trainer_factory_impl.dart'; - -Injector get dependencies => - injector ??= Injector() - ..registerSingleton( - () => const LinearOptimizerFactoryImpl()) - - ..registerSingleton( - () => const RandomizerFactoryImpl()) - - ..registerSingleton( - () => const LearningRateGeneratorFactoryImpl()) - - ..registerSingleton( - () => const 
InitialCoefficientsGeneratorFactoryImpl()) - - ..registerDependency( - () => const ConvergenceDetectorFactoryImpl()) - - ..registerSingleton( - () => const CostFunctionFactoryImpl()) - - ..registerSingleton( - () => const Float32InverseLogitLinkFunction(), - dependencyName: float32InverseLogitLinkFunctionToken) - - ..registerSingleton( - () => const Float64InverseLogitLinkFunction(), - dependencyName: float64InverseLogitLinkFunctionToken) - - ..registerSingleton( - () => const Float32SoftmaxLinkFunction(), - dependencyName: float32SoftmaxLinkFunctionToken) - - ..registerSingleton( - () => const Float64SoftmaxLinkFunction(), - dependencyName: float64SoftmaxLinkFunctionToken) - - ..registerSingleton( - () => const SplitIndicesProviderFactoryImpl()) - - ..registerSingleton( - () => const SoftmaxRegressorFactoryImpl()) - - ..registerSingleton( - () => const KernelFactoryImpl()) - - ..registerDependency( - () => const KnnSolverFactoryImpl()) - - ..registerSingleton( - () => const KnnClassifierFactoryImpl()) - - ..registerSingleton( - () => KnnRegressorFactoryImpl( - injector.get(), - injector.get(), - )) - - ..registerSingleton( - () => const SequenceElementsDistributionCalculatorFactoryImpl()) - - ..registerSingleton( - () => const NominalTreeSplitterFactoryImpl()) - - ..registerSingleton( - () => const NumericalTreeSplitterFactoryImpl()) - - ..registerSingleton( - () => const TreeSplitAssessorFactoryImpl()) - - ..registerSingleton( - () => TreeSplitterFactoryImpl( - injector.get(), - injector.get(), - injector.get(), - )) - - ..registerSingleton( - () => TreeSplitSelectorFactoryImpl( - injector.get(), - injector.get(), - )) - - ..registerSingleton( - () => TreeLeafDetectorFactoryImpl( - injector.get(), - )) - - ..registerSingleton( - () => TreeLeafLabelFactoryFactoryImpl( - injector.get(), - )) - - ..registerSingleton( - () => TreeTrainerFactoryImpl( - injector.get(), - injector.get(), - injector.get(), - )) - - ..registerSingleton( - () => const DecisionTreeClassifierFactoryImpl()); diff --git a/lib/src/di/dependency_keys.dart b/lib/src/di/dependency_keys.dart new file mode 100644 index 00000000..dfa77762 --- /dev/null +++ b/lib/src/di/dependency_keys.dart @@ -0,0 +1 @@ +const oneHotEncoderFactoryKey = 'oneHotEncoder'; diff --git a/lib/src/di/injector.dart b/lib/src/di/injector.dart index d2a981c8..95d54324 100644 --- a/lib/src/di/injector.dart +++ b/lib/src/di/injector.dart @@ -1,3 +1,3 @@ import 'package:injector/injector.dart'; -Injector injector; +final injector = Injector(); diff --git a/lib/src/helpers/binarize_column_matrix.dart b/lib/src/helpers/binarize_column_matrix.dart new file mode 100644 index 00000000..bc6165e1 --- /dev/null +++ b/lib/src/helpers/binarize_column_matrix.dart @@ -0,0 +1,20 @@ +import 'package:ml_algo/src/common/exception/matrix_column_exception.dart'; +import 'package:ml_linalg/matrix.dart'; + +Matrix binarizeColumnMatrix(Matrix source) { + if (source.columnsNum != 1) { + throw MatrixColumnException(source.rowsNum, source.columnsNum); + } + + final sourceAsVector = source + .toVector(); + final binarizedVectors = sourceAsVector + .unique() + .map( + (targetValue) => sourceAsVector + .mapToVector( + (sourceValue) => sourceValue == targetValue + ? 
1 : 0)).toList(); + + return Matrix.fromColumns(binarizedVectors, dtype: source.dtype); +} diff --git a/lib/src/helpers/features_target_split_dependency_key.dart b/lib/src/helpers/features_target_split_dependency_key.dart new file mode 100644 index 00000000..e20baab7 --- /dev/null +++ b/lib/src/helpers/features_target_split_dependency_key.dart @@ -0,0 +1 @@ +const featuresTargetSplitKey = 'featureTargetSplit'; diff --git a/lib/src/helpers/features_target_split_interface.dart b/lib/src/helpers/features_target_split_interface.dart new file mode 100644 index 00000000..91d4346d --- /dev/null +++ b/lib/src/helpers/features_target_split_interface.dart @@ -0,0 +1,6 @@ +import 'package:ml_dataframe/ml_dataframe.dart'; + +typedef FeaturesTargetSplit = Iterable Function(DataFrame dataset, { + Iterable targetIndices, + Iterable targetNames, +}); diff --git a/lib/src/helpers/normalize_class_labels_interface.dart b/lib/src/helpers/normalize_class_labels_interface.dart new file mode 100644 index 00000000..13f55c91 --- /dev/null +++ b/lib/src/helpers/normalize_class_labels_interface.dart @@ -0,0 +1,3 @@ +import 'package:ml_linalg/matrix.dart'; + +typedef NormalizeClassLabels = Matrix Function(Matrix, num, num); diff --git a/lib/src/linear_optimizer/coordinate_optimizer/coordinate_descent_optimizer.dart b/lib/src/linear_optimizer/coordinate_optimizer/coordinate_descent_optimizer.dart index 21ce9127..d4b01a63 100644 --- a/lib/src/linear_optimizer/coordinate_optimizer/coordinate_descent_optimizer.dart +++ b/lib/src/linear_optimizer/coordinate_optimizer/coordinate_descent_optimizer.dart @@ -1,5 +1,5 @@ import 'package:ml_algo/src/cost_function/cost_function.dart'; -import 'package:ml_algo/src/di/dependencies.dart'; +import 'package:ml_algo/src/di/injector.dart'; import 'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector.dart'; import 'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector_factory.dart'; import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_generator.dart'; @@ -23,11 +23,11 @@ class CoordinateDescentOptimizer implements LinearOptimizer { _labels = fittingLabels, _lambda = lambda ?? 
0.0, - _initialCoefficientsGenerator = dependencies + _initialCoefficientsGenerator = injector .get() .fromType(initialWeightsType, dtype), - _convergenceDetector = dependencies + _convergenceDetector = injector .get() .create(minCoefficientsUpdate, iterationsLimit), @@ -90,7 +90,7 @@ class CoordinateDescentOptimizer implements LinearOptimizer { return Vector // TODO Convert the logic into SIMD-way (SIMD way mapping) .fromList(coefficients.map((coef) => _regularize(coef, _lambda, j)) - .toList(growable: false)); + .toList(growable: false), dtype: _dtype); } double _regularize(double coefficient, double lambda, int coefNum) { diff --git a/lib/src/linear_optimizer/gradient_optimizer/gradient_optimizer.dart b/lib/src/linear_optimizer/gradient_optimizer/gradient_optimizer.dart index d37a351d..6efa7d70 100644 --- a/lib/src/linear_optimizer/gradient_optimizer/gradient_optimizer.dart +++ b/lib/src/linear_optimizer/gradient_optimizer/gradient_optimizer.dart @@ -1,5 +1,5 @@ import 'package:ml_algo/src/cost_function/cost_function.dart'; -import 'package:ml_algo/src/di/dependencies.dart'; +import 'package:ml_algo/src/di/injector.dart'; import 'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector.dart'; import 'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector_factory.dart'; import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_generator.dart'; @@ -35,19 +35,19 @@ class GradientOptimizer implements LinearOptimizer { _costFunction = costFunction, _dtype = dtype, - _initialCoefficientsGenerator = dependencies + _initialCoefficientsGenerator = injector .get() .fromType(initialCoefficientsType, dtype), - _learningRateGenerator = dependencies + _learningRateGenerator = injector .get() .fromType(learningRateType), - _convergenceDetector = dependencies + _convergenceDetector = injector .get() .create(minCoefficientsUpdate, iterationLimit), - _randomizer = dependencies + _randomizer = injector .get() .create(randomSeed) { if (batchSize < 1 || batchSize > points.rowsNum) { diff --git a/lib/src/metric/classification/accuracy.dart b/lib/src/metric/classification/accuracy.dart index 8683ec55..2a596ede 100644 --- a/lib/src/metric/classification/accuracy.dart +++ b/lib/src/metric/classification/accuracy.dart @@ -14,7 +14,8 @@ class AccuracyMetric implements Metric { } final score = zip([origLabels.rows, predictedLabels.rows]) - .where((rows) => rows.first == rows.last).length; + .where((rows) => rows.first == rows.last) + .length; return score / origLabels.rowsNum; } diff --git a/lib/src/metric/classification/precision.dart b/lib/src/metric/classification/precision.dart new file mode 100644 index 00000000..ae4a5b23 --- /dev/null +++ b/lib/src/metric/classification/precision.dart @@ -0,0 +1,72 @@ +import 'package:ml_algo/src/helpers/normalize_class_labels.dart'; +import 'package:ml_algo/src/metric/metric.dart'; +import 'package:ml_linalg/matrix.dart'; +import 'package:ml_linalg/vector.dart'; +import 'package:quiver/iterables.dart'; + +/// TODO: add warning if predicted values are all zeroes +class PrecisionMetric implements Metric { + const PrecisionMetric(); + + @override + /// Accepts [predictedLabels] and [origLabels] with entries with `1` as + /// positive label and `0` as negative one + double getScore(Matrix predictedLabels, Matrix origLabels) { + final allPredictedPositiveCounts = predictedLabels + .reduceRows((counts, row) => counts + row); + + // Let's say we have the following data: + // + // orig labels 
| predicted labels + // ------------------------------- + // 1 | 1 + // 1 | 0 + // 0 | 1 + // 0 | 0 + // 1 | 1 + //-------------------------------- + // + // in order to count correctly predicted positive labels in matrix notation + // we may multiply the predicted labels by 2, and then subtract the two + // matrices from each other: + // + // 1 - (1 * 2) = -1 + // 1 - (0 * 2) = 1 + // 0 - (1 * 2) = -2 + // 0 - (0 * 2) = 0 + // 1 - (1 * 2) = -1 + // + // we can see that the subtraction yields -1 whenever both the original and + // the predicted label are positive, so we need to count the number of + // elements equal to -1 in the resulting matrix + final difference = origLabels - (predictedLabels * 2); + final correctPositiveCounts = difference + .reduceRows( + (counts, row) => counts + row.mapToVector((diff) => diff == -1 + ? 1 : 0), + initValue: Vector.zero( + origLabels.columnsNum, + dtype: origLabels.dtype, + )); + final aggregatedScore = (correctPositiveCounts / allPredictedPositiveCounts) + .mean(); + + if (aggregatedScore.isFinite) { + return aggregatedScore; + } + + return zip([ + correctPositiveCounts, + allPredictedPositiveCounts, + ]).fold(0, (aggregated, pair) { + final correctPositiveCount = pair.first; + final allPredictedPositiveCount = pair.last; + + if (allPredictedPositiveCount != 0) { + return aggregated + correctPositiveCount / allPredictedPositiveCount; + } + + return aggregated + (correctPositiveCount == 0 ? 1 : 0); + }); + } +} diff --git a/lib/src/metric/classification/type.dart b/lib/src/metric/classification/type.dart deleted file mode 100644 index 5b5c0f9d..00000000 --- a/lib/src/metric/classification/type.dart +++ /dev/null @@ -1 +0,0 @@ -enum ClassificationMetricType { accuracy } diff --git a/lib/src/metric/metric_factory.dart b/lib/src/metric/metric_factory.dart new file mode 100644 index 00000000..48422970 --- /dev/null +++ b/lib/src/metric/metric_factory.dart @@ -0,0 +1,6 @@ +import 'package:ml_algo/src/metric/metric.dart'; +import 'package:ml_algo/src/metric/metric_type.dart'; + +abstract class MetricFactory { + Metric createByType(MetricType metricType); +} diff --git a/lib/src/metric/factory.dart b/lib/src/metric/metric_factory_impl.dart similarity index 64% rename from lib/src/metric/factory.dart rename to lib/src/metric/metric_factory_impl.dart index 1742ac8e..2358a0e4 100644 --- a/lib/src/metric/factory.dart +++ b/lib/src/metric/metric_factory_impl.dart @@ -1,11 +1,16 @@ import 'package:ml_algo/src/metric/classification/accuracy.dart'; +import 'package:ml_algo/src/metric/classification/precision.dart'; import 'package:ml_algo/src/metric/metric.dart'; +import 'package:ml_algo/src/metric/metric_factory.dart'; import 'package:ml_algo/src/metric/metric_type.dart'; import 'package:ml_algo/src/metric/regression/mape.dart'; import 'package:ml_algo/src/metric/regression/rmse.dart'; -class MetricFactory { - static Metric createByType(MetricType type) { +class MetricFactoryImpl implements MetricFactory { + const MetricFactoryImpl(); + + @override + Metric createByType(MetricType type) { switch (type) { case MetricType.rmse: return const RmseMetric(); @@ -16,6 +21,9 @@ class MetricFactory { case MetricType.accuracy: return const AccuracyMetric(); + case MetricType.precision: + return const PrecisionMetric(); + + default: throw UnsupportedError('Unsupported metric type $type'); } diff --git a/lib/src/metric/metric_type.dart b/lib/src/metric/metric_type.dart index bc5778bc..63202a58 100644 --- a/lib/src/metric/metric_type.dart +++
b/lib/src/metric/metric_type.dart @@ -1 +1 @@ -enum MetricType { mape, rmse, accuracy } +enum MetricType { mape, rmse, accuracy, precision } diff --git a/lib/src/metric/regression/mape.dart b/lib/src/metric/regression/mape.dart index 33edc0bd..5f3c73de 100644 --- a/lib/src/metric/regression/mape.dart +++ b/lib/src/metric/regression/mape.dart @@ -12,6 +12,7 @@ class MapeMetric implements Metric { } final predicted = predictedLabels.getColumn(0); final original = origLabels.getColumn(0); + return 100 / predicted.length * ((original - predicted) / original).abs().sum(); } diff --git a/lib/src/metric/regression/rmse.dart b/lib/src/metric/regression/rmse.dart index bc6d31c4..fd9746de 100644 --- a/lib/src/metric/regression/rmse.dart +++ b/lib/src/metric/regression/rmse.dart @@ -13,8 +13,10 @@ class RmseMetric implements Metric { 'a matrix-column'); } - final predicted = predictedLabels.getColumn(0); - final original = origLabels.getColumn(0); + final predicted = predictedLabels + .getColumn(0); + final original = origLabels + .getColumn(0); return math.sqrt(((predicted - original).pow(2)).mean()); } diff --git a/lib/src/metric/regression/type.dart b/lib/src/metric/regression/type.dart deleted file mode 100644 index 77073dbb..00000000 --- a/lib/src/metric/regression/type.dart +++ /dev/null @@ -1 +0,0 @@ -enum RegressionMetricType { mape, rmse } diff --git a/lib/src/model_selection/_init_module.dart b/lib/src/model_selection/_init_module.dart new file mode 100644 index 00000000..b49f4589 --- /dev/null +++ b/lib/src/model_selection/_init_module.dart @@ -0,0 +1,11 @@ +import 'package:ml_algo/src/model_selection/_injector.dart'; +import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider_factory.dart'; +import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider_factory_impl.dart'; + +void initModelSelectionModule() { + if (!modelSelectionInjector.exists()) { + modelSelectionInjector + ..registerSingleton( + () => const SplitIndicesProviderFactoryImpl()); + } +} diff --git a/lib/src/model_selection/_injector.dart b/lib/src/model_selection/_injector.dart new file mode 100644 index 00000000..fbefd145 --- /dev/null +++ b/lib/src/model_selection/_injector.dart @@ -0,0 +1,3 @@ +import 'package:injector/injector.dart'; + +final modelSelectionInjector = Injector(); diff --git a/lib/src/model_selection/assessable.dart b/lib/src/model_selection/assessable.dart index 4b6aa675..f173af6e 100644 --- a/lib/src/model_selection/assessable.dart +++ b/lib/src/model_selection/assessable.dart @@ -1,8 +1,11 @@ import 'package:ml_algo/src/metric/metric_type.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; +/// An interface for an ML model's performance assessment abstract class Assessable { - /// Assesses model according to provided [metricType] - double assess(DataFrame observations, Iterable targetNames, - MetricType metricType); + /// Assesses model performance according to the provided [metricType] + /// + /// Throws an exception if an inappropriate [metricType] is provided.
All the + /// appropriate metric types are in [allowedMetrics] + double assess(DataFrame observations, MetricType metricType); } diff --git a/lib/src/model_selection/cross_validator/cross_validator.dart b/lib/src/model_selection/cross_validator/cross_validator.dart index ce49658f..2e6692a0 100644 --- a/lib/src/model_selection/cross_validator/cross_validator.dart +++ b/lib/src/model_selection/cross_validator/cross_validator.dart @@ -1,5 +1,6 @@ -import 'package:ml_algo/src/di/dependencies.dart'; import 'package:ml_algo/src/metric/metric_type.dart'; +import 'package:ml_algo/src/model_selection/_init_module.dart'; +import 'package:ml_algo/src/model_selection/_injector.dart'; import 'package:ml_algo/src/model_selection/assessable.dart'; import 'package:ml_algo/src/model_selection/cross_validator/cross_validator_impl.dart'; import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider_factory.dart'; @@ -8,8 +9,7 @@ import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/linalg.dart'; -typedef PredictorFactory = Assessable Function(DataFrame observations, - Iterable targetNames); +typedef PredictorFactory = Assessable Function(DataFrame observations); typedef DataPreprocessFn = List Function(DataFrame trainData, DataFrame testData); @@ -26,25 +26,24 @@ abstract class CrossValidator { /// [samples] A dataset to be split into parts to iteratively evaluate given /// predictor's performance /// - /// [targetColumnNames] Names of columns from [samples] that contain outcomes - /// /// [numberOfFolds] Number of splits of the [samples] /// /// [dtype] A type for all the numerical data factory CrossValidator.kFold( DataFrame samples, - Iterable targetColumnNames, { + { int numberOfFolds = 5, DType dtype = DType.float32, }) { - final dataSplitterFactory = dependencies + initModelSelectionModule(); + + final dataSplitterFactory = modelSelectionInjector .get(); final dataSplitter = dataSplitterFactory .createByType(SplitIndicesProviderType.kFold, numberOfFolds: numberOfFolds); return CrossValidatorImpl( samples, - targetColumnNames, dataSplitter, dtype, ); @@ -60,25 +59,23 @@ abstract class CrossValidator { /// [samples] A dataset to be split into parts to iteratively /// evaluate given predictor's performance /// - /// [targetColumnNames] Names of columns from [samples] that contain outcomes. - /// /// [p] Size of a split of [samples]. /// /// [dtype] A type for all the numerical data. 
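///
/// A minimal usage sketch under assumed names (a `samples` dataframe and a
/// target column `'col_3'`, mirroring the example in [evaluate]); note that
/// the predictor factory now receives only the train split, and the returned
/// scores are awaited:
///
///     final validator = CrossValidator.lpo(samples, 2);
///     final scores = await validator.evaluate(
///       (trainData) => KnnRegressor(trainData, 'col_3', k: 4),
///       MetricType.mape,
///     );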
factory CrossValidator.lpo( DataFrame samples, - Iterable targetColumnNames, int p, { DType dtype = DType.float32, }) { - final dataSplitterFactory = dependencies + initModelSelectionModule(); + + final dataSplitterFactory = modelSelectionInjector .get(); final dataSplitter = dataSplitterFactory .createByType(SplitIndicesProviderType.lpo, p: p); return CrossValidatorImpl( samples, - targetColumnNames, dataSplitter, dtype, ); @@ -116,7 +113,7 @@ abstract class CrossValidator { /// header: header, /// headerExists: false, /// ); - /// final predictorFactory = (trainData, _) => + /// final predictorFactory = (trainData) => /// KnnRegressor(trainData, 'col_3', k: 4); /// final onDataSplit = (trainData, testData) { /// final standardizer = Standardizer(trainData); @@ -125,7 +122,7 @@ abstract class CrossValidator { /// standardizer.process(testData), /// ]; /// } - /// final validator = CrossValidator.kFold(data, ['col_3']); + /// final validator = CrossValidator.kFold(data); /// final scores = await validator.evaluate( /// predictorFactory, /// MetricType.mape, diff --git a/lib/src/model_selection/cross_validator/cross_validator_impl.dart b/lib/src/model_selection/cross_validator/cross_validator_impl.dart index f3b24432..23cc83b4 100644 --- a/lib/src/model_selection/cross_validator/cross_validator_impl.dart +++ b/lib/src/model_selection/cross_validator/cross_validator_impl.dart @@ -12,14 +12,12 @@ import 'package:quiver/iterables.dart'; class CrossValidatorImpl implements CrossValidator { CrossValidatorImpl( this.samples, - this.targetNames, this._splitter, this.dtype, ); final DataFrame samples; final DType dtype; - final Iterable targetNames; final SplitIndicesProvider _splitter; @override @@ -59,8 +57,8 @@ class CrossValidatorImpl implements CrossValidator { transformedTestDataColumnsNum); } - return predictorFactory(transformedTrainData, targetNames) - .assess(transformedTestData, targetNames, metricType); + return predictorFactory(transformedTrainData) + .assess(transformedTestData, metricType); }) .toList(); diff --git a/lib/src/model_selection/model_assessor/classifier_assessor.dart b/lib/src/model_selection/model_assessor/classifier_assessor.dart new file mode 100644 index 00000000..6175a8e1 --- /dev/null +++ b/lib/src/model_selection/model_assessor/classifier_assessor.dart @@ -0,0 +1,88 @@ +import 'package:ml_algo/src/classifier/classifier.dart'; +import 'package:ml_algo/src/common/exception/invalid_metric_type_exception.dart'; +import 'package:ml_algo/src/di/common/init_common_module.dart'; +import 'package:ml_algo/src/helpers/features_target_split_interface.dart'; +import 'package:ml_algo/src/helpers/normalize_class_labels_interface.dart'; +import 'package:ml_algo/src/metric/metric_factory.dart'; +import 'package:ml_algo/src/metric/metric_type.dart'; +import 'package:ml_algo/src/model_selection/model_assessor/model_assessor.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; + +class ClassifierAssessor implements ModelAssessor { + ClassifierAssessor( + this._metricFactory, + this._encoderFactory, + this._featuresTargetSplit, + this._normalizeClassLabels, + ); + + static const List _allowedMetricTypes = [ + MetricType.precision, + MetricType.accuracy, + ]; + + final MetricFactory _metricFactory; + final EncoderFactory _encoderFactory; + final FeaturesTargetSplit _featuresTargetSplit; + final NormalizeClassLabels _normalizeClassLabels; + + @override + double assess( + Classifier classifier, + MetricType metricType, + DataFrame samples, + ) { + if 
(!_allowedMetricTypes.contains(metricType)) { + throw InvalidMetricTypeException( + metricType, _allowedMetricTypes); + } + + final splits = _featuresTargetSplit( + samples, + targetNames: classifier.targetNames, + ).toList(); + final featuresFrame = splits[0]; + final originalLabelsFrame = splits[1]; + final metric = _metricFactory + .createByType(metricType); + final labelEncoder = _encoderFactory( + originalLabelsFrame, + originalLabelsFrame.header + ); + final isTargetEncoded = classifier.targetNames.length > 1; + final predictedLabels = !isTargetEncoded + ? labelEncoder + .process(classifier.predict(featuresFrame)) + .toMatrix(classifier.dtype) + : classifier + .predict(featuresFrame) + .toMatrix(classifier.dtype); + final originalLabels = !isTargetEncoded + ? labelEncoder + .process(originalLabelsFrame) + .toMatrix(classifier.dtype) + : originalLabelsFrame + .toMatrix(classifier.dtype); + final predefinedClassLabelsExist = classifier.negativeLabel != null + && classifier.positiveLabel != null; + final normalizedPredictedLabels = predefinedClassLabelsExist + ? _normalizeClassLabels( + predictedLabels, + classifier.positiveLabel, + classifier.negativeLabel, + ) + : predictedLabels; + final normalizedOriginalLabels = predefinedClassLabelsExist + ? _normalizeClassLabels( + originalLabels, + classifier.positiveLabel, + classifier.negativeLabel, + ) + : originalLabels; + + return metric.getScore( + normalizedPredictedLabels, + normalizedOriginalLabels, + ); + } +} diff --git a/lib/src/model_selection/model_assessor/model_assessor.dart b/lib/src/model_selection/model_assessor/model_assessor.dart new file mode 100644 index 00000000..0d299f06 --- /dev/null +++ b/lib/src/model_selection/model_assessor/model_assessor.dart @@ -0,0 +1,11 @@ +import 'package:ml_algo/src/metric/metric_type.dart'; +import 'package:ml_algo/src/predictor/predictor.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; + +abstract class ModelAssessor { + double assess( + T predictor, + MetricType metricType, + DataFrame samples, + ); +} diff --git a/lib/src/model_selection/model_assessor/regressor_assessor.dart b/lib/src/model_selection/model_assessor/regressor_assessor.dart new file mode 100644 index 00000000..cb67e516 --- /dev/null +++ b/lib/src/model_selection/model_assessor/regressor_assessor.dart @@ -0,0 +1,53 @@ +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_algo/src/common/exception/invalid_metric_type_exception.dart'; +import 'package:ml_algo/src/helpers/features_target_split.dart'; +import 'package:ml_algo/src/helpers/features_target_split_interface.dart'; +import 'package:ml_algo/src/metric/metric_factory.dart'; +import 'package:ml_algo/src/metric/metric_type.dart'; +import 'package:ml_algo/src/model_selection/model_assessor/model_assessor.dart'; +import 'package:ml_algo/src/predictor/predictor.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; + +class RegressorAssessor implements ModelAssessor { + RegressorAssessor( + this._metricFactory, + this._featuresTargetSplit, + ); + + static const List _allowedMetricTypes = [ + MetricType.rmse, + MetricType.mape, + ]; + + final MetricFactory _metricFactory; + final FeaturesTargetSplit _featuresTargetSplit; + + @override + double assess( + Predictor regressor, + MetricType metricType, + DataFrame samples, + ) { + if (!_allowedMetricTypes.contains(metricType)) { + throw InvalidMetricTypeException( + metricType, _allowedMetricTypes); + } + + final splits = _featuresTargetSplit( + samples, + targetNames: regressor.targetNames, + ).toList(); + final 
featuresFrame = splits[0]; + final originalLabelsFrame = splits[1]; + final metric = _metricFactory + .createByType(metricType); + final predictedLabels = regressor + .predict(featuresFrame) + .toMatrix(regressor.dtype); + final originalLabels = originalLabelsFrame + .toMatrix(regressor.dtype); + + return metric + .getScore(predictedLabels, originalLabels); + } +} diff --git a/lib/src/predictor/assessable_predictor_mixin.dart b/lib/src/predictor/assessable_predictor_mixin.dart deleted file mode 100644 index 3726aeff..00000000 --- a/lib/src/predictor/assessable_predictor_mixin.dart +++ /dev/null @@ -1,22 +0,0 @@ -import 'package:ml_algo/src/helpers/features_target_split.dart'; -import 'package:ml_algo/src/metric/factory.dart'; -import 'package:ml_algo/src/metric/metric_type.dart'; -import 'package:ml_algo/src/model_selection/assessable.dart'; -import 'package:ml_algo/src/predictor/predictor.dart'; -import 'package:ml_dataframe/ml_dataframe.dart'; - -mixin AssessablePredictorMixin implements Assessable, Predictor { - @override - double assess(DataFrame samples, Iterable targetNames, - MetricType metricType) { - final splits = featuresTargetSplit(samples, - targetNames: targetNames, - ).toList(); - - final metric = MetricFactory.createByType(metricType); - final prediction = predict(splits[0]); - final origLabels = splits[1].toMatrix(dtype); - - return metric.getScore(prediction.toMatrix(dtype), origLabels); - } -} diff --git a/lib/src/predictor/predictor.dart b/lib/src/predictor/predictor.dart index 04c6fe4a..ac7cca88 100644 --- a/lib/src/predictor/predictor.dart +++ b/lib/src/predictor/predictor.dart @@ -3,6 +3,10 @@ import 'package:ml_linalg/dtype.dart'; /// A common interface for all types of classifiers and regressors abstract class Predictor { + /// A collection of target column names of a dataset used to learn the ML + /// model + Iterable get targetNames; + /// Returns prediction, based on the model learned parameters DataFrame predict(DataFrame testFeatures); diff --git a/lib/src/regressor/_helpers/squared_cost_optimizer_factory.dart b/lib/src/regressor/_helpers/squared_cost_optimizer_factory.dart index 5195ed41..de4c9fc9 100644 --- a/lib/src/regressor/_helpers/squared_cost_optimizer_factory.dart +++ b/lib/src/regressor/_helpers/squared_cost_optimizer_factory.dart @@ -1,6 +1,6 @@ import 'package:ml_algo/src/cost_function/cost_function_factory.dart'; import 'package:ml_algo/src/cost_function/cost_function_type.dart'; -import 'package:ml_algo/src/di/dependencies.dart'; +import 'package:ml_algo/src/di/injector.dart'; import 'package:ml_algo/src/helpers/add_intercept_if.dart'; import 'package:ml_algo/src/helpers/features_target_split.dart'; import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; @@ -32,19 +32,12 @@ LinearOptimizer createSquaredCostOptimizer( Matrix initialCoefficients, DType dtype, }) { - final splits = featuresTargetSplit(observations, targetNames: [targetName]).toList(); - final points = splits[0].toMatrix(dtype); final labels = splits[1].toMatrix(dtype); - - final optimizerFactory = dependencies - .get(); - - final costFunctionFactory = dependencies - .get(); - + final optimizerFactory = injector.get(); + final costFunctionFactory = injector.get(); final costFunction = costFunctionFactory.createByType( CostFunctionType.leastSquare, ); diff --git a/lib/src/regressor/_mixins/assessable_regressor_mixin.dart b/lib/src/regressor/_mixins/assessable_regressor_mixin.dart new file mode 100644 index 00000000..8ea48296 
--- /dev/null
+++ b/lib/src/regressor/_mixins/assessable_regressor_mixin.dart
@@ -0,0 +1,19 @@
+import 'package:ml_algo/src/di/injector.dart';
+import 'package:ml_algo/src/metric/metric_type.dart';
+import 'package:ml_algo/src/model_selection/assessable.dart';
+import 'package:ml_algo/src/model_selection/model_assessor/model_assessor.dart';
+import 'package:ml_algo/src/predictor/predictor.dart';
+import 'package:ml_dataframe/ml_dataframe.dart';
+
+mixin AssessableRegressorMixin implements
+    Assessable,
+    Predictor {
+
+  @override
+  double assess(
+    DataFrame samples,
+    MetricType metricType,
+  ) => injector
+      .get<ModelAssessor<Predictor>>()
+      .assess(this, metricType, samples);
+}
diff --git a/lib/src/regressor/_mixins/regression_metrics_mixin.dart b/lib/src/regressor/_mixins/regression_metrics_mixin.dart
new file mode 100644
index 00000000..d08107cd
--- /dev/null
+++ b/lib/src/regressor/_mixins/regression_metrics_mixin.dart
@@ -0,0 +1,8 @@
+import 'package:ml_algo/ml_algo.dart';
+
+mixin RegressionMetricsMixin {
+  List<MetricType> get allowedMetrics => [
+    MetricType.mape,
+    MetricType.rmse,
+  ];
+}
diff --git a/lib/src/regressor/knn_regressor/_helpers/create_knn_regressor.dart b/lib/src/regressor/knn_regressor/_helpers/create_knn_regressor.dart
new file mode 100644
index 00000000..95e68905
--- /dev/null
+++ b/lib/src/regressor/knn_regressor/_helpers/create_knn_regressor.dart
@@ -0,0 +1,25 @@
+import 'package:ml_algo/src/knn_kernel/kernel_type.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/_injector.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_factory.dart';
+import 'package:ml_dataframe/ml_dataframe.dart';
+import 'package:ml_linalg/distance.dart';
+import 'package:ml_linalg/dtype.dart';
+
+KnnRegressor createKnnRegressor({
+  DataFrame fittingData,
+  String targetName,
+  int k,
+  KernelType kernel = KernelType.gaussian,
+  Distance distance = Distance.euclidean,
+  DType dtype = DType.float32,
+}) => knnRegressorInjector
+    .get<KnnRegressorFactory>()
+    .create(
+      fittingData,
+      targetName,
+      k,
+      kernel,
+      distance,
+      dtype,
+    );
diff --git a/lib/src/regressor/knn_regressor/_init_module.dart b/lib/src/regressor/knn_regressor/_init_module.dart
new file mode 100644
index 00000000..df9d5cdd
--- /dev/null
+++ b/lib/src/regressor/knn_regressor/_init_module.dart
@@ -0,0 +1,26 @@
+import 'package:ml_algo/src/di/common/init_common_module.dart';
+import 'package:ml_algo/src/knn_kernel/kernel_factory.dart';
+import 'package:ml_algo/src/knn_kernel/kernel_factory_impl.dart';
+import 'package:ml_algo/src/knn_solver/knn_solver_factory.dart';
+import 'package:ml_algo/src/knn_solver/knn_solver_factory_impl.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/_injector.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_factory.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_factory_impl.dart';
+
+void initKnnRegressorModule() {
+  initCommonModule();
+
+  knnRegressorInjector
+    ..clearAll()
+    ..registerSingleton<KernelFactory>(
+        () => const KernelFactoryImpl())
+
+    ..registerDependency<KnnSolverFactory>(
+        () => const KnnSolverFactoryImpl())
+
+    ..registerSingleton<KnnRegressorFactory>(
+        () => KnnRegressorFactoryImpl(
+          knnRegressorInjector.get<KernelFactory>(),
+          knnRegressorInjector.get<KnnSolverFactory>(),
+        ));
+}
diff --git a/lib/src/regressor/knn_regressor/_injector.dart b/lib/src/regressor/knn_regressor/_injector.dart
new file mode 100644
index 00000000..4725468e
--- /dev/null
+++ b/lib/src/regressor/knn_regressor/_injector.dart
@@ -0,0 +1,3 @@
+import
'package:injector/injector.dart'; + +final knnRegressorInjector = Injector(); diff --git a/lib/src/regressor/knn_regressor/knn_regressor.dart b/lib/src/regressor/knn_regressor/knn_regressor.dart index 872ba0b7..b615968b 100644 --- a/lib/src/regressor/knn_regressor/knn_regressor.dart +++ b/lib/src/regressor/knn_regressor/knn_regressor.dart @@ -1,8 +1,8 @@ -import 'package:ml_algo/src/di/dependencies.dart'; import 'package:ml_algo/src/knn_kernel/kernel_type.dart'; import 'package:ml_algo/src/model_selection/assessable.dart'; import 'package:ml_algo/src/predictor/predictor.dart'; -import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_factory.dart'; +import 'package:ml_algo/src/regressor/knn_regressor/_helpers/create_knn_regressor.dart'; +import 'package:ml_algo/src/regressor/knn_regressor/_init_module.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/distance.dart'; import 'package:ml_linalg/dtype.dart'; @@ -50,7 +50,16 @@ abstract class KnnRegressor implements Assessable, Predictor { Distance distance = Distance.euclidean, DType dtype = DType.float32, } - ) => dependencies - .get() - .create(fittingData, targetName, k, kernel, distance, dtype); + ) { + initKnnRegressorModule(); + + return createKnnRegressor( + fittingData: fittingData, + targetName: targetName, + k: k, + kernel: kernel, + distance: distance, + dtype: dtype, + ); + } } diff --git a/lib/src/regressor/knn_regressor/knn_regressor_impl.dart b/lib/src/regressor/knn_regressor/knn_regressor_impl.dart index 9768bc8f..e5304005 100644 --- a/lib/src/regressor/knn_regressor/knn_regressor_impl.dart +++ b/lib/src/regressor/knn_regressor/knn_regressor_impl.dart @@ -1,14 +1,18 @@ import 'package:ml_algo/src/helpers/validate_test_features.dart'; import 'package:ml_algo/src/knn_kernel/kernel.dart'; import 'package:ml_algo/src/knn_solver/knn_solver.dart'; -import 'package:ml_algo/src/predictor/assessable_predictor_mixin.dart'; +import 'package:ml_algo/src/regressor/_mixins/assessable_regressor_mixin.dart'; import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/matrix.dart'; import 'package:ml_linalg/vector.dart'; -class KnnRegressorImpl with AssessablePredictorMixin implements KnnRegressor { +class KnnRegressorImpl + with + AssessableRegressorMixin + implements + KnnRegressor { KnnRegressorImpl( this._targetName, this._solver, @@ -19,6 +23,9 @@ class KnnRegressorImpl with AssessablePredictorMixin implements KnnRegressor { @override final DType dtype; + @override + Iterable get targetNames => [_targetName]; + final String _targetName; final KnnSolver _solver; final Kernel _kernel; @@ -46,14 +53,17 @@ class KnnRegressorImpl with AssessablePredictorMixin implements KnnRegressor { final neighbours = _solver.findKNeighbours(features); return neighbours.map((kNeighbours) { - final weightedLabels = kNeighbours.fold(_zeroVector, (weightedSum, neighbour) { + final weightedLabels = kNeighbours.fold(_zeroVector, + (weightedSum, neighbour) { final weight = _kernel.getWeightByDistance(neighbour.distance); final weightedLabel = neighbour.label * weight; + return weightedSum + weightedLabel; }); final weightsSum = kNeighbours.fold(0, - (sum, neighbour) => sum + _kernel.getWeightByDistance(neighbour.distance)); + (sum, neighbour) => sum + _kernel + .getWeightByDistance(neighbour.distance)); return weightedLabels / weightsSum; }); diff --git 
a/lib/src/regressor/linear_regressor/_helpers/create_linear_regressor.dart b/lib/src/regressor/linear_regressor/_helpers/create_linear_regressor.dart new file mode 100644 index 00000000..9acbf2b8 --- /dev/null +++ b/lib/src/regressor/linear_regressor/_helpers/create_linear_regressor.dart @@ -0,0 +1,67 @@ +import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; +import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_type.dart'; +import 'package:ml_algo/src/linear_optimizer/linear_optimizer_type.dart'; +import 'package:ml_algo/src/linear_optimizer/regularization_type.dart'; +import 'package:ml_algo/src/regressor/_helpers/squared_cost_optimizer_factory.dart'; +import 'package:ml_algo/src/regressor/linear_regressor/linear_regressor.dart'; +import 'package:ml_algo/src/regressor/linear_regressor/linear_regressor_impl.dart'; +import 'package:ml_dataframe/ml_dataframe.dart'; +import 'package:ml_linalg/dtype.dart'; +import 'package:ml_linalg/matrix.dart'; + +LinearRegressor createLinearRegressor({ + DataFrame fittingData, + String targetName, + LinearOptimizerType optimizerType = LinearOptimizerType.gradient, + int iterationsLimit = 100, + LearningRateType learningRateType = LearningRateType.constant, + InitialCoefficientsType initialCoefficientsType = + InitialCoefficientsType.zeroes, + double initialLearningRate = 1e-3, + double minCoefficientsUpdate = 1e-12, + double lambda, + RegularizationType regularizationType, + bool fitIntercept = false, + double interceptScale = 1.0, + int randomSeed, + int batchSize = 1, + Matrix initialCoefficients, + bool isFittingDataNormalized = false, + bool collectLearningData = false, + DType dtype = DType.float32, +}) { + final optimizer = createSquaredCostOptimizer( + fittingData, + targetName, + optimizerType: optimizerType, + iterationsLimit: iterationsLimit, + initialLearningRate: initialLearningRate, + minCoefficientsUpdate: minCoefficientsUpdate, + lambda: lambda, + regularizationType: regularizationType, + randomSeed: randomSeed, + batchSize: batchSize, + learningRateType: learningRateType, + initialCoefficientsType: initialCoefficientsType, + fitIntercept: fitIntercept, + interceptScale: interceptScale, + isFittingDataNormalized: isFittingDataNormalized, + dtype: dtype, + ); + + final coefficients = optimizer.findExtrema( + initialCoefficients: initialCoefficients, + isMinimizingObjective: true, + collectLearningData: collectLearningData, + ).getColumn(0); + final costPerIteration = optimizer.costPerIteration; + + return LinearRegressorImpl( + coefficients, + targetName, + fitIntercept: fitIntercept, + interceptScale: interceptScale, + costPerIteration: costPerIteration, + dtype: dtype, + ); +} diff --git a/lib/src/regressor/linear_regressor/linear_regressor.dart b/lib/src/regressor/linear_regressor/linear_regressor.dart index 2037d7c7..c7919923 100644 --- a/lib/src/regressor/linear_regressor/linear_regressor.dart +++ b/lib/src/regressor/linear_regressor/linear_regressor.dart @@ -1,13 +1,13 @@ import 'package:ml_algo/src/common/serializable/serializable.dart'; +import 'package:ml_algo/src/di/common/init_common_module.dart'; import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_type.dart'; import 'package:ml_algo/src/linear_optimizer/linear_optimizer_type.dart'; import 
'package:ml_algo/src/linear_optimizer/regularization_type.dart'; import 'package:ml_algo/src/model_selection/assessable.dart'; import 'package:ml_algo/src/predictor/predictor.dart'; -import 'package:ml_algo/src/regressor/_helpers/squared_cost_optimizer_factory.dart'; +import 'package:ml_algo/src/regressor/linear_regressor/_helpers/create_linear_regressor.dart'; import 'package:ml_algo/src/regressor/linear_regressor/_helpers/create_linear_regressor_from_json.dart'; -import 'package:ml_algo/src/regressor/linear_regressor/linear_regressor_impl.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/matrix.dart'; @@ -121,38 +121,26 @@ abstract class LinearRegressor implements Assessable, Serializable, Predictor { bool collectLearningData = false, DType dtype = DType.float32, }) { - final optimizer = createSquaredCostOptimizer( - fittingData, - targetName, + initCommonModule(); + + return createLinearRegressor( + fittingData: fittingData, + targetName: targetName, optimizerType: optimizerType, iterationsLimit: iterationsLimit, + learningRateType: learningRateType, + initialCoefficientsType: initialCoefficientsType, initialLearningRate: initialLearningRate, minCoefficientsUpdate: minCoefficientsUpdate, lambda: lambda, regularizationType: regularizationType, - randomSeed: randomSeed, - batchSize: batchSize, - learningRateType: learningRateType, - initialCoefficientsType: initialCoefficientsType, fitIntercept: fitIntercept, interceptScale: interceptScale, - isFittingDataNormalized: isFittingDataNormalized, - dtype: dtype, - ); - - final coefficients = optimizer.findExtrema( + randomSeed: randomSeed, + batchSize: batchSize, initialCoefficients: initialCoefficients, - isMinimizingObjective: true, + isFittingDataNormalized: isFittingDataNormalized, collectLearningData: collectLearningData, - ).getColumn(0); - final costPerIteration = optimizer.costPerIteration; - - return LinearRegressorImpl( - coefficients, - targetName, - fitIntercept: fitIntercept, - interceptScale: interceptScale, - costPerIteration: costPerIteration, dtype: dtype, ); } diff --git a/lib/src/regressor/linear_regressor/linear_regressor_impl.dart b/lib/src/regressor/linear_regressor/linear_regressor_impl.dart index 7a4e44c6..9483d5e5 100644 --- a/lib/src/regressor/linear_regressor/linear_regressor_impl.dart +++ b/lib/src/regressor/linear_regressor/linear_regressor_impl.dart @@ -1,7 +1,7 @@ import 'package:json_annotation/json_annotation.dart'; import 'package:ml_algo/src/common/serializable/serializable_mixin.dart'; import 'package:ml_algo/src/helpers/add_intercept_if.dart'; -import 'package:ml_algo/src/predictor/assessable_predictor_mixin.dart'; +import 'package:ml_algo/src/regressor/_mixins/assessable_regressor_mixin.dart'; import 'package:ml_algo/src/regressor/linear_regressor/linear_regressor.dart'; import 'package:ml_algo/src/regressor/linear_regressor/linear_regressor_json_keys.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; @@ -17,7 +17,7 @@ part 'linear_regressor_impl.g.dart'; @JsonSerializable() class LinearRegressorImpl with - AssessablePredictorMixin, + AssessableRegressorMixin, SerializableMixin implements LinearRegressor { @@ -72,6 +72,9 @@ class LinearRegressorImpl ) final DType dtype; + @override + Iterable get targetNames => [targetName]; + @override DataFrame predict(DataFrame features) { final prediction = addInterceptIf( @@ -83,7 +86,7 @@ class LinearRegressorImpl return DataFrame.fromMatrix( prediction, - header: [targetName], + header: 
targetNames, ); } } diff --git a/lib/src/tree_trainer/_helpers/create_decision_tree_trainer.dart b/lib/src/tree_trainer/_helpers/create_decision_tree_trainer.dart index 2f7d2f31..5e198a3d 100644 --- a/lib/src/tree_trainer/_helpers/create_decision_tree_trainer.dart +++ b/lib/src/tree_trainer/_helpers/create_decision_tree_trainer.dart @@ -1,4 +1,4 @@ -import 'package:ml_algo/src/di/dependencies.dart'; +import 'package:ml_algo/src/classifier/decision_tree_classifier/_injector.dart'; import 'package:ml_algo/src/tree_trainer/leaf_label/leaf_label_factory_type.dart'; import 'package:ml_algo/src/tree_trainer/split_assessor/split_assessor_type.dart'; import 'package:ml_algo/src/tree_trainer/split_selector/split_selector_type.dart'; @@ -19,10 +19,8 @@ TreeTrainer createDecisionTreeTrainer( final targetIdx = enumerate(samples.header) .firstWhere((indexedName) => indexedName.value == targetName) .index; - final featuresIndexedSeries = enumerate(samples.series) .where((indexed) => indexed.index != targetIdx); - final featureIdxToUniqueValues = Map.fromEntries( featuresIndexedSeries .where((indexed) => indexed.value.isDiscrete) @@ -34,12 +32,12 @@ TreeTrainer createDecisionTreeTrainer( ), ), ); - - final trainerFactory = dependencies.get(); + final trainerFactory = decisionTreeInjector.get(); return trainerFactory.createByType( TreeTrainerType.decision, - featuresIndexedSeries.map((indexed) => indexed.index), + featuresIndexedSeries + .map((indexed) => indexed.index), targetIdx, featureIdxToUniqueValues, minErrorOnNode, diff --git a/pubspec.yaml b/pubspec.yaml index 6dca1e30..78ed08c4 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -1,6 +1,6 @@ name: ml_algo description: Machine learning algorithms, Machine learning models performance evaluation functionality -version: 14.2.6 +version: 15.0.0 homepage: https://github.com/gyrdym/ml_algo environment: @@ -12,6 +12,7 @@ dependencies: json_serializable: ^3.3.0 ml_dataframe: ^0.2.0 ml_linalg: ^12.17.3 + ml_preprocessing: ^5.2.0 quiver: ^2.0.2 xrange: ^0.0.8 @@ -22,4 +23,5 @@ dev_dependencies: grinder: ^0.8.3 ml_tech: ^0.0.8 mockito: ^3.0.0 + process_run: ^0.10.12 test: ^1.2.0 diff --git a/test/classifier/decision_tree_classifier/decision_tree_classifier_impl_test.dart b/test/classifier/decision_tree_classifier/decision_tree_classifier_impl_test.dart index 41f66ecd..560d4c95 100644 --- a/test/classifier/decision_tree_classifier/decision_tree_classifier_impl_test.dart +++ b/test/classifier/decision_tree_classifier/decision_tree_classifier_impl_test.dart @@ -1,5 +1,13 @@ +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_algo/src/classifier/classifier.dart'; +import 'package:ml_algo/src/classifier/decision_tree_classifier/_injector.dart'; import 'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_classifier_impl.dart'; import 'package:ml_algo/src/classifier/decision_tree_classifier/decision_tree_json_keys.dart'; +import 'package:ml_algo/src/di/common/init_common_module.dart'; +import 'package:ml_algo/src/di/dependency_keys.dart'; +import 'package:ml_algo/src/di/injector.dart'; +import 'package:ml_algo/src/metric/metric_factory.dart'; +import 'package:ml_algo/src/model_selection/model_assessor/model_assessor.dart'; import 'package:ml_algo/src/tree_trainer/leaf_label/leaf_label.dart'; import 'package:ml_algo/src/tree_trainer/tree_node/tree_node.dart'; import 'package:ml_algo/src/tree_trainer/tree_node/tree_node_json_keys.dart'; @@ -16,48 +24,100 @@ import '../../mocks.dart'; void main() { group('DecisionTreeClassifierImpl', () { + 
final classifierAssessorMock = ClassifierAssessorMock(); final sample1 = Vector.fromList([1, 2, 3]); final sample2 = Vector.fromList([10, 20, 30]); final sample3 = Vector.fromList([100, 200, 300]); - - final label1 = TreeLeafLabel(0, probability: 0.7); - final label2 = TreeLeafLabel(1, probability: 0.55); - final label3 = TreeLeafLabel(2, probability: 0.5); - - final targetColumnName = 'class_name'; - + final label1 = 100; + final label2 = 300; + final label3 = 200; + final sample1WithLabel = Vector.fromList([...sample1, label1]); + final sample2WithLabel = Vector.fromList([...sample2, label2]); + final sample3WithLabel = Vector.fromList([...sample3, label3]); + final predictedBinarizedLabels = [ + [0, 0, 1], + [1, 0, 0], + [0, 1, 0], + ]; + final originalBinarizedLabels = [ + [1, 0, 0], + [0, 1, 0], + [0, 0, 1], + ]; + final learnedLeafLabel1 = TreeLeafLabel(label3, probability: 0.7); + final learnedLeafLabel2 = TreeLeafLabel(label1, probability: 0.55); + final learnedLeafLabel3 = TreeLeafLabel(label2, probability: 0.5); final features = Matrix.fromRows([ sample1, sample2, sample3, ]); - + final labelledFeatures = Matrix.fromRows([ + sample1WithLabel, + sample2WithLabel, + sample3WithLabel, + ]); + final unlabelledFeaturesFrame = DataFrame.fromMatrix(features); + final labelledFeaturesFrame = DataFrame.fromMatrix(labelledFeatures); + final targetColumnName = labelledFeaturesFrame.header.last; + final predictedLabelsFrame = DataFrame( + [ + [label3], + [label1], + [label2], + ], + headerExists: false, + header: [targetColumnName]); + final predictedBinarizedLabelsFrame = DataFrame(predictedBinarizedLabels, + headerExists: false, header: [targetColumnName]); + final originalBinarizedLabelsFrame = DataFrame(originalBinarizedLabels, + headerExists: false, header: [targetColumnName]); final rootNodeJson = { childrenJsonKey: >[], }; - final classifier32Json = { dTypeJsonKey: dTypeToJson(DType.float32), targetColumnNameJsonKey: targetColumnName, treeRootNodeJsonKey: rootNodeJson, }; - final classifier64Json = { dTypeJsonKey: dTypeToJson(DType.float64), targetColumnNameJsonKey: targetColumnName, treeRootNodeJsonKey: rootNodeJson, }; - final treeRootMock = createRootNodeMock({ - sample1: label1, - sample2: label2, - sample3: label3, + sample1: learnedLeafLabel1, + sample2: learnedLeafLabel2, + sample3: learnedLeafLabel3, }, rootNodeJson); + final metricFactoryMock = MetricFactoryMock(); + final metricMock = MetricMock(); + final encoderFactoryMock = EncoderFactoryMock(); + final encoderMock = EncoderMock(); + final encodedLabelsFrames = [ + predictedBinarizedLabelsFrame, + originalBinarizedLabelsFrame, + ]; + var encoderCallIteration = 0; DecisionTreeClassifierImpl classifier32; DecisionTreeClassifierImpl classifier64; setUp(() { + when(metricFactoryMock.createByType(argThat(isA()))) + .thenReturn(metricMock); + when(encoderFactoryMock.create(any, any)).thenReturn(encoderMock); + when(encoderMock.process(any)) + .thenAnswer((_) => encodedLabelsFrames[encoderCallIteration++]); + + injector + ..clearAll() + ..registerDependency>( + () => classifierAssessorMock) + ..registerDependency(() => encoderFactoryMock.create, + dependencyName: oneHotEncoderFactoryKey) + ..registerSingleton(() => metricFactoryMock); + classifier32 = DecisionTreeClassifierImpl( treeRootMock, targetColumnName, @@ -71,36 +131,57 @@ void main() { ); }); - test('should return data frame with a correct header', () { - final predictedLabels = classifier32.predictProbabilities( - DataFrame.fromMatrix(features), - ); - 
expect(predictedLabels.header, equals([targetColumnName])); + tearDown(() { + reset(metricFactoryMock); + reset(metricMock); + reset(encoderFactoryMock); + reset(encoderMock); + encoderCallIteration = 0; + + injector.clearAll(); + decisionTreeInjector.clearAll(); + }); + + test('should predict labels for passed unlabelled features dataframe', () { + final actual = classifier32.predict(unlabelledFeaturesFrame); + + expect(actual.toMatrix(), predictedLabelsFrame.toMatrix()); }); - test('should return data frame with empty header if input matrix is ' + test('should return predicted labels with a proper header', () { + final actual = classifier32.predict(unlabelledFeaturesFrame); + + expect(actual.header, classifier32.targetNames); + }); + + test( + 'should return data frame with empty header if input matrix is ' 'empty', () { final predictedClasses = classifier32.predict(DataFrame([[]])); + expect(predictedClasses.header, isEmpty); }); - test('should return data frame with empty matrix if input feature matrix is ' + test( + 'should return data frame with empty matrix if input feature matrix is ' 'empty', () { final predictedClasses = classifier32.predict(DataFrame([[]])); + expect(predictedClasses.toMatrix(), isEmpty); }); - test('should return data frame with probabilities for each class label', () { - final predictedLabels = classifier32.predictProbabilities( - DataFrame.fromMatrix(features), - ); + test('should return data frame with probabilities for each class label', + () { + final predictedLabels = + classifier32.predictProbabilities(unlabelledFeaturesFrame); + expect( - predictedLabels.toMatrix(), - iterable2dAlmostEqualTo([ - [label1.probability.toDouble()], - [label2.probability.toDouble()], - [label3.probability.toDouble()], - ]), + predictedLabels.toMatrix(), + iterable2dAlmostEqualTo([ + [learnedLeafLabel1.probability.toDouble()], + [learnedLeafLabel2.probability.toDouble()], + [learnedLeafLabel3.probability.toDouble()], + ]), ); }); @@ -131,30 +212,40 @@ void main() { expect(classifier.targetColumnName, equals(targetColumnName)); expect(classifier.treeRootNode, isNotNull); }); + + test('should call classifier assessor, dtype=DType.float32', () { + final metricType = MetricType.precision; + + classifier32.assess(labelledFeaturesFrame, metricType); + verify(classifierAssessorMock.assess( + classifier32, + metricType, + labelledFeaturesFrame, + )).called(1); + }); }); } -TreeNode createRootNodeMock(Map samples, +TreeNode createRootNodeMock(Map samplesByLabel, [Map jsonMock = const {}]) { - final rootMock = TreeNodeMock(); final children = []; when(rootMock.isLeaf).thenReturn(false); - samples.forEach((sample, leafLabel) { + samplesByLabel.forEach((sample, leafLabel) { final node = TreeNodeMock(); when(node.label).thenReturn(leafLabel); when(node.isLeaf).thenReturn(true); - samples.forEach((otherSample, _) { - when(node.isSamplePassed(otherSample)).thenReturn(sample == otherSample); - }); + samplesByLabel.forEach((otherSample, _) => + when(node.isSamplePassed(otherSample)) + .thenReturn(sample == otherSample)); children.add(node); }); - + when(rootMock.children).thenReturn(children); when(rootMock.toJson()).thenReturn(jsonMock); diff --git a/test/classifier/decision_tree_classifier/decision_tree_classifier_test.dart b/test/classifier/decision_tree_classifier/decision_tree_classifier_integration_test.dart similarity index 99% rename from test/classifier/decision_tree_classifier/decision_tree_classifier_test.dart rename to 
test/classifier/decision_tree_classifier/decision_tree_classifier_integration_test.dart index 4627652b..95978252 100644 --- a/test/classifier/decision_tree_classifier/decision_tree_classifier_test.dart +++ b/test/classifier/decision_tree_classifier/decision_tree_classifier_integration_test.dart @@ -21,12 +21,10 @@ void main() { ]); final targetName = 'col_8'; - final classifier = DecisionTreeClassifier(fakeDataSet, targetName, minError: 0.3, minSamplesCount: 1, maxDepth: 3); - final testFileName = 'test/classifier/decision_tree_classifier/serialized_classifier.json'; - + test('should create classifier', () { expect(classifier, isA()); }); diff --git a/test/classifier/knn_classifier/knn_classifier_impl_test.dart b/test/classifier/knn_classifier/knn_classifier_impl_test.dart index 126db5c1..7b65fe6a 100644 --- a/test/classifier/knn_classifier/knn_classifier_impl_test.dart +++ b/test/classifier/knn_classifier/knn_classifier_impl_test.dart @@ -1,5 +1,8 @@ +import 'package:ml_algo/src/classifier/knn_classifier/_injector.dart'; import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier_impl.dart'; +import 'package:ml_algo/src/di/injector.dart'; import 'package:ml_algo/src/knn_solver/neigbour.dart'; +import 'package:ml_algo/src/metric/metric_type.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/linalg.dart'; import 'package:ml_tech/unit_testing/matchers/iterable_2d_almost_equal_to.dart'; @@ -17,6 +20,8 @@ void main() { tearDown(() { reset(solverMock); reset(kernelMock); + injector.clearAll(); + knnClassifierInjector.clearAll(); }); test('should throw an exception if no class labels are provided', () { @@ -462,11 +467,8 @@ void main() { final firstClassLabel = 1; final secondClassLabel = 2; final thirdClassLabel = 3; - final unexpectedClassLabel = 100; - final classLabels = [thirdClassLabel, firstClassLabel, secondClassLabel]; - final classifier = KnnClassifierImpl( 'target', classLabels, diff --git a/test/classifier/knn_classifier/knn_classifier_test.dart b/test/classifier/knn_classifier/knn_classifier_test.dart index 3f92953c..09db0b1b 100644 --- a/test/classifier/knn_classifier/knn_classifier_test.dart +++ b/test/classifier/knn_classifier/knn_classifier_test.dart @@ -1,5 +1,6 @@ -import 'package:injector/injector.dart'; import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_algo/src/classifier/knn_classifier/_helpers/create_knn_classifier.dart'; +import 'package:ml_algo/src/classifier/knn_classifier/_injector.dart'; import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier.dart'; import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier_factory.dart'; import 'package:ml_algo/src/di/injector.dart'; @@ -49,7 +50,7 @@ void main() { knnClassifierFactoryMock = createKnnClassifierFactoryMock( knnClassifierMock); - injector = Injector() + knnClassifierInjector ..registerSingleton(() => kernelFactoryMock) ..registerSingleton(() => solverFactoryMock) ..registerSingleton(() => knnClassifierFactoryMock); @@ -65,17 +66,18 @@ void main() { reset(knnClassifierMock); reset(knnClassifierFactoryMock); - injector = null; + injector.clearAll(); + knnClassifierInjector.clearAll(); }); test('should call kernel factory with proper kernel type', () { - KnnClassifier( + createKnnClassifier( data, targetName, 2, - kernel: KernelType.uniform, - distance: Distance.cosine, - dtype: DType.float32, + KernelType.uniform, + Distance.cosine, + DType.float32, ); verify(kernelFactoryMock.createByType(KernelType.uniform)).called(1); @@ -83,13 +85,13 @@ void 
main() { test('should call solver factory with proper train features, train labels, ' 'k parameter, distance type and standardization flag', () { - KnnClassifier( + createKnnClassifier( data, targetName, 2, - kernel: KernelType.uniform, - distance: Distance.hamming, - dtype: DType.float32, + KernelType.uniform, + Distance.hamming, + DType.float32, ); verify(solverFactoryMock.create( @@ -112,13 +114,13 @@ void main() { }); test('should call KnnClassifierFactory in order to create a classifier', () { - final classifier = KnnClassifier( + final classifier = createKnnClassifier( data, targetName, 2, - kernel: KernelType.uniform, - distance: Distance.cosine, - dtype: DType.float32, + KernelType.uniform, + Distance.cosine, + DType.float32, ); verify(knnClassifierFactoryMock.create( @@ -141,13 +143,13 @@ void main() { ], ); - KnnClassifier( + createKnnClassifier( data, 'target', 2, - kernel: KernelType.uniform, - distance: Distance.hamming, - dtype: DType.float32, + KernelType.uniform, + Distance.hamming, + DType.float32, ); final expectedLabels = [1, 3, 2]; @@ -158,13 +160,13 @@ void main() { test('should throw an exception if target column does not exist in the ' 'train data', () { - final actual = () => KnnClassifier( + final actual = () => createKnnClassifier( data, 'unknown_column', 2, - kernel: KernelType.uniform, - distance: Distance.hamming, - dtype: DType.float32, + KernelType.uniform, + Distance.hamming, + DType.float32, ); expect(actual, throwsException); diff --git a/test/classifier/logistic_regressor/integration_test/logistic_regressor_default_constructor_integration_test.dart b/test/classifier/logistic_regressor/integration_test/logistic_regressor_default_constructor_integration_test.dart index 8ad5a263..2d8af1f1 100644 --- a/test/classifier/logistic_regressor/integration_test/logistic_regressor_default_constructor_integration_test.dart +++ b/test/classifier/logistic_regressor/integration_test/logistic_regressor_default_constructor_integration_test.dart @@ -1,3 +1,4 @@ +import 'package:ml_algo/src/classifier/logistic_regressor/_injector.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor.dart'; import 'package:ml_algo/src/di/injector.dart'; import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; @@ -44,7 +45,10 @@ void main() { final targetName = 'col_3'; final samples = DataFrame(data, headerExists: false); - tearDownAll(() => injector = null); + tearDown(() { + injector.clearAll(); + logisticRegressorInjector.clearAll(); + }); test('should fit given data, float32 case', () { final classifier = LogisticRegressor( @@ -91,6 +95,11 @@ void main() { ], headerExists: false); final targetName = 'col_3'; + tearDown(() { + injector.clearAll(); + logisticRegressorInjector.clearAll(); + }); + test('should consider intercept term, dtype=DType.float32', () { final classifier = createClassifier(samples: features, targetName: targetName, dtype: DType.float32, batchSize: 2); diff --git a/test/classifier/logistic_regressor/integration_test/logistic_regressor_prediction_integration_test.dart b/test/classifier/logistic_regressor/integration_test/logistic_regressor_prediction_integration_test.dart index 3bc4f9be..b7c00a7e 100644 --- a/test/classifier/logistic_regressor/integration_test/logistic_regressor_prediction_integration_test.dart +++ b/test/classifier/logistic_regressor/integration_test/logistic_regressor_prediction_integration_test.dart @@ -1,5 +1,4 @@ import 
'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor.dart'; -import 'package:ml_algo/src/di/injector.dart'; import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; import 'package:ml_algo/src/metric/metric_type.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; @@ -18,18 +17,20 @@ void main() { ]; final targetName = 'col_3'; final samples = DataFrame(data, headerExists: false); - final classifier = LogisticRegressor( - samples, - targetName, - iterationsLimit: 2, - learningRateType: LearningRateType.constant, - initialLearningRate: 1.0, - batchSize: 5, - fitIntercept: false, - dtype: dtype, - ); + LogisticRegressor classifier; - tearDownAll(() => injector = null); + setUp(() { + classifier = LogisticRegressor( + samples, + targetName, + iterationsLimit: 2, + learningRateType: LearningRateType.constant, + initialLearningRate: 1.0, + batchSize: 5, + fitIntercept: false, + dtype: dtype, + ); + }); test('should make prediction', () { final newFeatures = Matrix.fromList([ @@ -53,10 +54,9 @@ void main() { test('should evaluate prediction quality, accuracy = 0', () { final newSamples = DataFrame([ [2.0, 4.0, 1.0, 1.0], - ], header: ['first', 'second', 'third', 'target'], headerExists: false); + ], headerExists: false); - final score = classifier.assess(newSamples, ['target'], - MetricType.accuracy); + final score = classifier.assess(newSamples, MetricType.accuracy); expect(score, equals(0.0)); }); @@ -64,20 +64,21 @@ void main() { test('should evaluate prediction quality, accuracy = 1', () { final newFeatures = DataFrame([ [2, 4, 1, 0], - ], header: ['first', 'second', 'third', 'target'], headerExists: false); + ], headerExists: false); - final score = classifier.assess(newFeatures, ['target'], - MetricType.accuracy); + final score = classifier.assess(newFeatures, MetricType.accuracy); expect(score, equals(1.0)); }); }; - group('LogisticRegressor.predict (dtype=DType.float32)', () { - testPredictMethod(DType.float32); - }); + group('LogisticRegressor.predict', () { + group('(dtype=DType.float32)', () { + testPredictMethod(DType.float32); + }); - group('LogisticRegressor.predict (dtype=DType.float64)', () { - testPredictMethod(DType.float64); + group('(dtype=DType.float64)', () { + testPredictMethod(DType.float64); + }); }); } diff --git a/test/classifier/logistic_regressor/integration_test/logistic_regressor_serialization_integration_test.dart b/test/classifier/logistic_regressor/integration_test/logistic_regressor_serialization_integration_test.dart index 5c3cbe62..5991e153 100644 --- a/test/classifier/logistic_regressor/integration_test/logistic_regressor_serialization_integration_test.dart +++ b/test/classifier/logistic_regressor/integration_test/logistic_regressor_serialization_integration_test.dart @@ -1,7 +1,9 @@ import 'dart:io'; +import 'package:ml_algo/src/classifier/logistic_regressor/_injector.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor_json_keys.dart'; +import 'package:ml_algo/src/di/injector.dart'; import 'package:ml_algo/src/linear_optimizer/gradient_optimizer/learning_rate_generator/learning_rate_type.dart'; import 'package:ml_algo/src/link_function/link_function_encoded_values.dart'; import 'package:ml_algo/src/link_function/logit/float32_inverse_logit_function.dart'; @@ -283,6 +285,9 @@ void main() { if (await file.exists()) { await file.delete(); } + + injector.clearAll(); + 
logisticRegressorInjector.clearAll(); }); test('should return a pointer to a json file while saving serialized ' diff --git a/test/classifier/logistic_regressor/unit_test/logistic_regressor_impl_test.dart b/test/classifier/logistic_regressor/unit_test/logistic_regressor_impl_test.dart index 4102faeb..1055c2c3 100644 --- a/test/classifier/logistic_regressor/unit_test/logistic_regressor_impl_test.dart +++ b/test/classifier/logistic_regressor/unit_test/logistic_regressor_impl_test.dart @@ -1,6 +1,9 @@ +import 'package:ml_algo/src/classifier/logistic_regressor/_injector.dart'; import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor_impl.dart'; import 'package:ml_algo/src/common/exception/invalid_class_labels_exception.dart'; import 'package:ml_algo/src/common/exception/invalid_probability_threshold_exception.dart'; +import 'package:ml_algo/src/di/injector.dart'; +import 'package:ml_algo/src/metric/metric_type.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/linalg.dart'; import 'package:mockito/mockito.dart'; @@ -58,6 +61,8 @@ void main() { tearDown(() { reset(linkFunctionMock); + injector.clearAll(); + logisticRegressorInjector.clearAll(); }); group('default constructor', () { @@ -263,5 +268,14 @@ void main() { expect(prediction.header, equals([className])); }); }); + + group('LogisticRegressor.allowedMetrics', () { + test('should contain appropriate metrics', () { + expect(regressor.allowedMetrics, [ + MetricType.accuracy, + MetricType.precision, + ]); + }); + }); }); } diff --git a/test/classifier/logistic_regressor/unit_test/logistic_regressor_test.dart b/test/classifier/logistic_regressor/unit_test/logistic_regressor_test.dart index 464db7db..09b16f5b 100644 --- a/test/classifier/logistic_regressor/unit_test/logistic_regressor_test.dart +++ b/test/classifier/logistic_regressor/unit_test/logistic_regressor_test.dart @@ -1,5 +1,6 @@ -import 'package:injector/injector.dart'; -import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor.dart'; +import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_algo/src/classifier/logistic_regressor/_helpers/create_logistic_regressor.dart'; +import 'package:ml_algo/src/classifier/logistic_regressor/_injector.dart'; import 'package:ml_algo/src/common/exception/invalid_probability_threshold_exception.dart'; import 'package:ml_algo/src/cost_function/cost_function.dart'; import 'package:ml_algo/src/cost_function/cost_function_factory.dart'; @@ -13,7 +14,6 @@ import 'package:ml_algo/src/linear_optimizer/linear_optimizer_type.dart'; import 'package:ml_algo/src/linear_optimizer/regularization_type.dart'; import 'package:ml_algo/src/link_function/link_function.dart'; import 'package:ml_algo/src/link_function/link_function_dependency_tokens.dart'; -import 'package:ml_algo/src/link_function/logit/float32_inverse_logit_function.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/matrix.dart'; @@ -43,25 +43,28 @@ void main() { CostFunctionFactory costFunctionFactoryMock; LinearOptimizer optimizerMock; LinearOptimizerFactory optimizerFactoryMock; - LogisticRegressor logisticRegressorMock; setUp(() { + injector.clearAll(); + logisticRegressorInjector.clearAll(); + linkFunctionMock = LinkFunctionMock(); costFunctionMock = CostFunctionMock(); costFunctionFactoryMock = createCostFunctionFactoryMock(costFunctionMock); optimizerMock = LinearOptimizerMock(); optimizerFactoryMock = createLinearOptimizerFactoryMock(optimizerMock); - 
logisticRegressorMock = LogisticRegressorMock(); - injector = Injector() - ..registerSingleton( - () => linkFunctionMock, - dependencyName: float32InverseLogitLinkFunctionToken) + injector ..registerDependency( () => costFunctionFactoryMock) ..registerSingleton( () => optimizerFactoryMock); + logisticRegressorInjector + ..registerSingleton( + () => linkFunctionMock, + dependencyName: float32InverseLogitLinkFunctionToken); + when(optimizerMock.findExtrema( initialCoefficients: anyNamed('initialCoefficients'), isMinimizingObjective: anyNamed('isMinimizingObjective'), @@ -70,15 +73,19 @@ void main() { when(optimizerMock.costPerIteration).thenReturn(errors); }); - tearDownAll(() => injector = null); + tearDown(() { + injector.clearAll(); + logisticRegressorInjector.clearAll(); + }); test('should throw an exception if a probability threshold is less than ' '0.0', () { final probabilityThreshold = -0.01; - final actual = () => LogisticRegressor( - observations, - targetColumnName, + final actual = () => createLogisticRegressor( + trainData: observations, + targetName: targetColumnName, probabilityThreshold: probabilityThreshold, + initialCoefficients: Vector.empty(), ); expect(actual, throwsA(isA())); @@ -87,10 +94,11 @@ void main() { test('should throw an exception if a probability threshold is equal to ' '0.0', () { final probabilityThreshold = 0.0; - final actual = () => LogisticRegressor( - observations, - targetColumnName, + final actual = () => createLogisticRegressor( + trainData: observations, + targetName: targetColumnName, probabilityThreshold: probabilityThreshold, + initialCoefficients: Vector.empty(), ); expect(actual, throwsA(isA())); @@ -99,10 +107,11 @@ void main() { test('should throw an exception if a probability threshold is equal to ' '1.0', () { final probabilityThreshold = 1.0; - final actual = () => LogisticRegressor( - observations, - targetColumnName, + final actual = () => createLogisticRegressor( + trainData: observations, + targetName: targetColumnName, probabilityThreshold: probabilityThreshold, + initialCoefficients: Vector.empty(), ); expect(actual, throwsA(isA())); @@ -111,10 +120,11 @@ void main() { test('should throw an exception if a probability threshold is greater than ' '1.0', () { final probabilityThreshold = 1.01; - final actual = () => LogisticRegressor( - observations, - targetColumnName, + final actual = () => createLogisticRegressor( + trainData: observations, + targetName: targetColumnName, probabilityThreshold: probabilityThreshold, + initialCoefficients: Vector.empty(), ); expect(actual, throwsA(isA())); @@ -123,9 +133,10 @@ void main() { test('should throw an exception if a target column does not exist', () { final targetColumnName = 'col_10'; - final actual = () => LogisticRegressor( - observations, - targetColumnName, + final actual = () => createLogisticRegressor( + trainData: observations, + targetName: targetColumnName, + initialCoefficients: Vector.empty(), ); expect(actual, throwsException); @@ -133,9 +144,9 @@ void main() { test('should throw an exception if too few initial coefficients ' 'provided', () { - final actual = () => LogisticRegressor( - observations, - targetColumnName, + final actual = () => createLogisticRegressor( + trainData: observations, + targetName: targetColumnName, initialCoefficients: Vector.fromList([1, 2]), ); @@ -147,9 +158,9 @@ void main() { final targetColumnName = 'col_4'; - final actual = () => LogisticRegressor( - observations, - targetColumnName, + final actual = () => createLogisticRegressor( + trainData: 
observations, + targetName: targetColumnName, initialCoefficients: Vector.fromList([1, 2, 3, 4, 5, 6]), ); @@ -158,11 +169,12 @@ void main() { test('should call cost function factory in order to create ' 'loglikelihood cost function', () { - LogisticRegressor( - observations, - 'col_4', + createLogisticRegressor( + trainData: observations, + targetName: 'col_4', positiveLabel: positiveLabel, negativeLabel: negativeLabel, + initialCoefficients: Vector.empty(), ); verify(costFunctionFactoryMock.createByType( @@ -174,9 +186,9 @@ void main() { }); test('should call linear optimizer factory and consider intercept term', () { - LogisticRegressor( - observations, - 'col_4', + createLogisticRegressor( + trainData: observations, + targetName: 'col_4', learningRateType: LearningRateType.decreasingAdaptive, initialCoefficientsType: InitialCoefficientsType.zeroes, iterationsLimit: 1000, @@ -221,9 +233,9 @@ void main() { test('should find the extrema for provided observations while ' 'instantiating', () { - LogisticRegressor( - observations, - 'col_4', + createLogisticRegressor( + trainData: observations, + targetName: 'col_4', initialCoefficients: initialCoefficients, fitIntercept: true, ); @@ -244,11 +256,12 @@ void main() { final interceptScale = -12.0; final dtype = DType.float32; - final classifier = LogisticRegressor( - observations, - targetName, + final classifier = createLogisticRegressor( + trainData: observations, + targetName: targetName, probabilityThreshold: probabilityThreshold, fitIntercept: fitIntercept, + initialCoefficients: Vector.empty(), interceptScale: interceptScale, isFittingDataNormalized: true, positiveLabel: positiveLabel, diff --git a/test/classifier/softmax_regressor/integration_test/softmax_regressor_serialization_integration_test.dart b/test/classifier/softmax_regressor/integration_test/softmax_regressor_serialization_integration_test.dart index 8d2411f2..9681caac 100644 --- a/test/classifier/softmax_regressor/integration_test/softmax_regressor_serialization_integration_test.dart +++ b/test/classifier/softmax_regressor/integration_test/softmax_regressor_serialization_integration_test.dart @@ -1,7 +1,10 @@ import 'dart:io'; import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_algo/src/classifier/softmax_regressor/_init_module.dart'; +import 'package:ml_algo/src/classifier/softmax_regressor/_injector.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor_json_keys.dart'; +import 'package:ml_algo/src/di/injector.dart'; import 'package:ml_algo/src/link_function/link_function_encoded_values.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; @@ -56,6 +59,11 @@ void main() { }; group('SoftmaxRegressor.toJson', () { + tearDown(() { + injector.clearAll(); + softmaxRegressorInjector.clearAll(); + }); + test('should serialize classNames field', () { final classifier = createClassifier(); final serialized = classifier.toJson(); @@ -197,6 +205,9 @@ void main() { if (await file.exists()) { await file.delete(); } + + injector.clearAll(); + softmaxRegressorInjector.clearAll(); }); test('should return a pointer to a file while saving the model into the ' diff --git a/test/classifier/softmax_regressor/unit_test/softmax_regressor_impl_test.dart b/test/classifier/softmax_regressor/unit_test/softmax_regressor_impl_test.dart index a2ad8acf..0bc09194 100644 --- a/test/classifier/softmax_regressor/unit_test/softmax_regressor_impl_test.dart +++ 
b/test/classifier/softmax_regressor/unit_test/softmax_regressor_impl_test.dart @@ -1,4 +1,7 @@ +import 'package:ml_algo/src/classifier/softmax_regressor/_injector.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor_impl.dart'; +import 'package:ml_algo/src/di/injector.dart'; +import 'package:ml_algo/src/metric/metric_type.dart'; import 'package:ml_dataframe/ml_dataframe.dart'; import 'package:ml_linalg/dtype.dart'; import 'package:ml_linalg/matrix.dart'; @@ -99,6 +102,8 @@ void main() { tearDown(() { reset(linkFunctionMock); + injector.clearAll(); + softmaxRegressorInjector.clearAll(); }); group('constructor', () { @@ -256,5 +261,14 @@ void main() { expect(regressor.costPerIteration, costPerIteration); }); }); + + group('LogisticRegressor.allowedMetrics', () { + test('should contain appropriate metrics', () { + expect(regressor.allowedMetrics, [ + MetricType.accuracy, + MetricType.precision, + ]); + }); + }); }); } diff --git a/test/classifier/softmax_regressor/unit_test/softmax_regressor_test.dart b/test/classifier/softmax_regressor/unit_test/softmax_regressor_test.dart index e982d5b3..7542d88a 100644 --- a/test/classifier/softmax_regressor/unit_test/softmax_regressor_test.dart +++ b/test/classifier/softmax_regressor/unit_test/softmax_regressor_test.dart @@ -1,5 +1,7 @@ import 'package:injector/injector.dart'; import 'package:ml_algo/ml_algo.dart'; +import 'package:ml_algo/src/classifier/softmax_regressor/_helpers/create_softmax_regressor.dart'; +import 'package:ml_algo/src/classifier/softmax_regressor/_injector.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor.dart'; import 'package:ml_algo/src/classifier/softmax_regressor/softmax_regressor_factory.dart'; import 'package:ml_algo/src/cost_function/cost_function.dart'; @@ -96,15 +98,21 @@ void main() { softmaxRegressorFactoryMock = createSoftmaxRegressorFactoryMock( softmaxRegressorMock); - injector = Injector() + softmaxRegressorInjector + ..clearAll() ..registerSingleton(() => linkFunctionMock, dependencyName: float32SoftmaxLinkFunctionToken) - ..registerDependency( - () => costFunctionFactoryMock) - ..registerSingleton(() => optimizerFactoryMock) + ..registerSingleton(() => linkFunctionMock, + dependencyName: float64SoftmaxLinkFunctionToken) ..registerSingleton( () => softmaxRegressorFactoryMock); + injector + ..clearAll() + ..registerDependency( + () => costFunctionFactoryMock) + ..registerSingleton(() => optimizerFactoryMock); + when(optimizerMock.findExtrema( initialCoefficients: anyNamed('initialCoefficients'), isMinimizingObjective: anyNamed('isMinimizingObjective'), @@ -113,14 +121,17 @@ void main() { when(optimizerMock.costPerIteration).thenReturn(costPerIteration); }); - tearDownAll(() => injector = null); + tearDown(() { + injector.clearAll(); + softmaxRegressorInjector.clearAll(); + }); test('should throw an exception if some target columns do not exist', () { final targetColumnNames = ['target_1', 'some', 'unknown', 'columns']; - final actual = () => SoftmaxRegressor( - observations, - targetColumnNames, + final actual = () => createSoftmaxRegressor( + trainData: observations, + targetNames: targetColumnNames, ); expect(actual, throwsException); @@ -134,9 +145,9 @@ void main() { final targetColumnNames = ['target_1']; - final actual = () => SoftmaxRegressor( - observations, - targetColumnNames, + final actual = () => createSoftmaxRegressor( + trainData: observations, + targetNames: targetColumnNames, ); expect(actual, throwsException); @@ -144,11 +155,12 @@ void 
main() {
   test('should call cost function factory in order to create '
       'loglikelihood cost function', () {
-    SoftmaxRegressor(
-      observations,
-      ['target_1', 'target_2', 'target_3'],
+    createSoftmaxRegressor(
+      trainData: observations,
+      targetNames: ['target_1', 'target_2', 'target_3'],
       positiveLabel: positiveLabel,
       negativeLabel: negativeLabel,
+      dtype: DType.float32,
     );

     verify(costFunctionFactoryMock.createByType(
@@ -161,9 +173,9 @@ void main() {
   test('should call linear optimizer factory and consider intercept term '
       'while calling the factory', () {
-    SoftmaxRegressor(
-      observations,
-      ['target_1', 'target_2', 'target_3'],
+    createSoftmaxRegressor(
+      trainData: observations,
+      targetNames: ['target_1', 'target_2', 'target_3'],
       optimizerType: LinearOptimizerType.gradient,
       learningRateType: LearningRateType.constant,
       initialCoefficientsType: InitialCoefficientsType.zeroes,
@@ -215,10 +227,11 @@ void main() {
   test('should find the extrema for fitting observations while '
       'instantiating', () {
-    SoftmaxRegressor(
-      observations,
-      ['target_1', 'target_2', 'target_3'],
+    createSoftmaxRegressor(
+      trainData: observations,
+      targetNames: ['target_1', 'target_2', 'target_3'],
       initialCoefficients: initialCoefficients,
+      dtype: DType.float32,
     );

     verify(optimizerMock.findExtrema(
@@ -229,10 +242,11 @@ void main() {
   test('should pass collectLearningData to the optimizer mock\'s findExtrema '
       'method, collectLearningData=true', () {
-    SoftmaxRegressor(
-      observations,
-      ['target_1', 'target_2', 'target_3'],
+    createSoftmaxRegressor(
+      trainData: observations,
+      targetNames: ['target_1', 'target_2', 'target_3'],
       collectLearningData: true,
+      dtype: DType.float32,
     );

     verify(optimizerMock.findExtrema(
@@ -244,10 +258,11 @@ void main() {
   test('should pass collectLearningData to the optimizer mock\'s findExtrema '
       'method, collectLearningData=false', () {
-    SoftmaxRegressor(
-      observations,
-      ['target_1', 'target_2', 'target_3'],
+    createSoftmaxRegressor(
+      trainData: observations,
+      targetNames: ['target_1', 'target_2', 'target_3'],
       collectLearningData: false,
+      dtype: DType.float32,
     );

     verify(optimizerMock.findExtrema(
@@ -259,10 +274,11 @@ void main() {
   test('should pass cost per iteration list to the softmax regressor '
       'factory', () {
-    SoftmaxRegressor(
-      observations,
-      ['target_1', 'target_2', 'target_3'],
+    createSoftmaxRegressor(
+      trainData: observations,
+      targetNames: ['target_1', 'target_2', 'target_3'],
       collectLearningData: true,
+      dtype: DType.float32,
     );

     verify(softmaxRegressorFactoryMock.create(any, any, any, any, any, any,
diff --git a/test/helpers/binarize_column_matrix_test.dart b/test/helpers/binarize_column_matrix_test.dart
new file mode 100644
index 00000000..172b3060
--- /dev/null
+++ b/test/helpers/binarize_column_matrix_test.dart
@@ -0,0 +1,81 @@
+import 'package:ml_algo/src/common/exception/matrix_column_exception.dart';
+import 'package:ml_algo/src/helpers/binarize_column_matrix.dart';
+import 'package:ml_linalg/dtype.dart';
+import 'package:ml_linalg/matrix.dart';
+import 'package:test/test.dart';
+
+void main() {
+  group('binarizeColumnMatrix', () {
+    final classLabel1 = 21.0;
+    final classLabel2 = -1001.0;
+    final classLabel3 = 0.0;
+    final classLabel4 = 0.67;
+    final source = [
+      [classLabel2],
+      [classLabel1],
+      [classLabel1],
+      [classLabel3],
+      [classLabel2],
+      [classLabel3],
+      [classLabel1],
+    ];
+    final sourceFloat32Matrix = Matrix.fromList(source, dtype: DType.float32);
+    final sourceFloat64Matrix = Matrix.fromList(source, dtype: DType.float64);
+    final binarizedMatrix = Matrix.fromList([
+      [1, 0, 0],
+      [0, 1, 0],
+      [0, 1, 0],
+      [0, 0, 1],
+      [1, 0, 0],
+      [0, 0, 1],
+      [0, 1, 0],
+    ]);
+    final distinctMatrix = Matrix.fromList([
+      [classLabel2],
+      [classLabel1],
+      [classLabel4],
+      [classLabel3],
+    ]);
+    final binarizedDistinctMatrix = Matrix.fromList([
+      [1, 0, 0, 0],
+      [0, 1, 0, 0],
+      [0, 0, 1, 0],
+      [0, 0, 0, 1],
+    ]);
+
+    test('should throw an exception if a matrix of improper shape was '
+        'passed', () {
+      final incorrectMatrix = Matrix.fromList([
+        [21, 34, 33],
+      ]);
+
+      expect(() => binarizeColumnMatrix(incorrectMatrix),
+          throwsA(isA<MatrixColumnException>()));
+    });
+
+    test('should throw an exception if an empty matrix was passed', () {
+      final incorrectMatrix = Matrix.fromList([]);
+
+      expect(() => binarizeColumnMatrix(incorrectMatrix),
+          throwsA(isA<MatrixColumnException>()));
+    });
+
+    test('should handle float32 matrix', () {
+      expect(binarizeColumnMatrix(sourceFloat32Matrix).dtype,
+          sourceFloat32Matrix.dtype);
+    });
+
+    test('should handle float64 matrix', () {
+      expect(binarizeColumnMatrix(sourceFloat64Matrix).dtype,
+          sourceFloat64Matrix.dtype);
+    });
+
+    test('should binarize source matrix', () {
+      expect(binarizeColumnMatrix(sourceFloat32Matrix), binarizedMatrix);
+    });
+
+    test('should binarize matrix with all distinct elements', () {
+      expect(binarizeColumnMatrix(distinctMatrix), binarizedDistinctMatrix);
+    });
+  });
+}
diff --git a/test/linear_optimizer/coordinate_optimizer/coordinate_descent_optimizer_integration_test.dart b/test/linear_optimizer/coordinate_optimizer/coordinate_descent_optimizer_integration_test.dart
index 046842c2..e776803e 100644
--- a/test/linear_optimizer/coordinate_optimizer/coordinate_descent_optimizer_integration_test.dart
+++ b/test/linear_optimizer/coordinate_optimizer/coordinate_descent_optimizer_integration_test.dart
@@ -1,4 +1,5 @@
 import 'package:ml_algo/src/cost_function/least_square_cost_function.dart';
+import 'package:ml_algo/src/di/common/init_common_module.dart';
 import 'package:ml_algo/src/di/injector.dart';
 import 'package:ml_algo/src/linear_optimizer/coordinate_optimizer/coordinate_descent_optimizer.dart';
 import 'package:ml_algo/src/linear_optimizer/initial_coefficients_generator/initial_coefficients_type.dart';
@@ -26,6 +27,8 @@ void main() {
     Matrix labels;

     setUp(() {
+      initCommonModule();
+
       data = Matrix.fromList([point1, point2, point3, point4]);
       labels = Matrix.fromList([
         [20.0],
@@ -42,7 +45,7 @@
           lambda: lambda);
     });

-    tearDownAll(() => injector = null);
+    tearDownAll(injector.clearAll);

     /// (The test case explanation)[https://github.com/gyrdym/ml_algo/wiki/Coordinate-descent-optimizer-(unregularized-case)-should-find-optimal-weights-for-the-given-data]
     test('should find optimal coefficients for the given data', () {
@@ -67,6 +70,7 @@
     Matrix labels;

     setUp(() {
+      initCommonModule();
       data = Matrix.fromList([point1, point2, point3]);
       labels = Matrix.fromList([
         [2.0],
@@ -85,6 +89,8 @@
       );
     });

+    tearDown(injector.clearAll);
+
     /// (The test case explanation)[https://github.com/gyrdym/ml_algo/wiki/Coordinate-descent-optimizer-(regularized-case)-should-find-optimal-weights-for-the-given-data]
     test('should find optimal coefficients for the given data', () {
       // actually, points in this example are not normalized
diff --git a/test/linear_optimizer/gradient/gradient_optimizer_test.dart b/test/linear_optimizer/gradient/gradient_optimizer_test.dart
index 7d0d9a2d..332529c6 100644
--- a/test/linear_optimizer/gradient/gradient_optimizer_test.dart
+++ b/test/linear_optimizer/gradient/gradient_optimizer_test.dart
@@ -1,4 +1,3 @@
-import 'package:injector/injector.dart';
 import 'package:ml_algo/src/di/injector.dart';
 import 'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector.dart';
 import 'package:ml_algo/src/linear_optimizer/convergence_detector/convergence_detector_factory.dart';
@@ -79,7 +78,8 @@ void main() {
     convergenceDetectorFactoryMock =
         createConvergenceDetectorFactoryMock(convergenceDetectorMock);

-    injector = Injector()
+    injector
+      ..clearAll()
       ..registerDependency(
               () => learningRateGeneratorFactoryMock)
       ..registerDependency(
               () =>
@@ -123,10 +123,9 @@ void main() {
   tearDown(() {
     resetMockitoState();
     reset(costFunctionMock);
+    injector.clearAll();
   });

-  tearDownAll(() => injector = null);
-
   test('should process `batchSize` parameter, batchSize=$batchSize1', () {
     final optimizer = GradientOptimizer(
       points,
diff --git a/test/metric/classification/precision_test.dart b/test/metric/classification/precision_test.dart
new file mode 100644
index 00000000..cb995eb4
--- /dev/null
+++ b/test/metric/classification/precision_test.dart
@@ -0,0 +1,79 @@
+import 'package:ml_algo/src/metric/classification/precision.dart';
+import 'package:ml_linalg/matrix.dart';
+import 'package:test/test.dart';
+
+void main() {
+  group('PrecisionMetric', () {
+    final origLabels = Matrix.fromList([
+      [1, 0, 0],
+      [1, 0, 0],
+      [0, 1, 0],
+      [0, 0, 1],
+      [0, 1, 0],
+      [1, 0, 0],
+      [0, 0, 1],
+    ]);
+    final origLabelsWithZeroColumn = Matrix.fromList([
+      [1, 0, 0],
+      [1, 0, 0],
+      [0, 1, 0],
+      [1, 0, 0],
+      [0, 1, 0],
+      [1, 0, 0],
+      [1, 0, 0],
+    ]);
+    final predictedLabels = Matrix.fromList([
+      [0, 1, 0],
+      [1, 0, 0],
+      [0, 1, 0],
+      [0, 0, 1],
+      [0, 1, 0],
+      [1, 0, 0],
+      [0, 0, 1],
+    ]);
+    final predictedLabelsWithZeroColumn = Matrix.fromList([
+      [0, 1, 0],
+      [1, 0, 0],
+      [0, 1, 0],
+      [0, 1, 0],
+      [0, 1, 0],
+      [1, 0, 0],
+      [1, 0, 0],
+    ]);
+    final metric = const PrecisionMetric();
+
+    test('should return a correct score', () {
+      final score = metric.getScore(predictedLabels, origLabels);
+
+      expect(score, closeTo((1 + 2 / 3 + 1) / 3, 1e-5));
+    });
+
+    test('should return a correct score if there is at least one column with '
+        'all zeroes ', () {
+      final score = metric.getScore(predictedLabelsWithZeroColumn, origLabels);
+
+      expect(score, closeTo((2 / 3 + 2 / 4 + 0) / 3, 1e-5));
+    });
+
+    test('should return a correct score if there is a zero column in the '
+        'original labels', () {
+      final score = metric.getScore(predictedLabels, origLabelsWithZeroColumn);
+
+      expect(score, closeTo((1 + 2 / 3 + 0) / 3, 1e-5));
+    });
+
+    test('should return a correct score if both original labels and predicted '
+        'labels have zero columns', () {
+      final score = metric.getScore(predictedLabelsWithZeroColumn,
+          origLabelsWithZeroColumn);
+
+      expect(score, closeTo((1 + 2 / 4 + 0) / 3, 1e-5));
+    });
+
+    test('should return 1 if predicted labels are correct', () {
+      final score = metric.getScore(origLabels, origLabels);
+
+      expect(score, 1);
+    });
+  });
+}
diff --git a/test/metric/metric_factory_impl_test.dart b/test/metric/metric_factory_impl_test.dart
new file mode 100644
index 00000000..df24a550
--- /dev/null
+++ b/test/metric/metric_factory_impl_test.dart
@@ -0,0 +1,33 @@
+import 'package:ml_algo/src/metric/classification/accuracy.dart';
+import 'package:ml_algo/src/metric/classification/precision.dart';
+import 'package:ml_algo/src/metric/metric_factory_impl.dart';
+import 'package:ml_algo/src/metric/metric_type.dart';
+import 'package:ml_algo/src/metric/regression/mape.dart';
+import 'package:ml_algo/src/metric/regression/rmse.dart';
+import 'package:test/test.dart';
+
+void main() {
+  group('MetricFactoryImpl', () {
+    const factory = MetricFactoryImpl();
+
+    test('should create RmseMetric instance', () {
+      expect(factory.createByType(MetricType.rmse), isA<RmseMetric>());
+    });
+
+    test('should create MapeMetric instance', () {
+      expect(factory.createByType(MetricType.mape), isA<MapeMetric>());
+    });
+
+    test('should create AccuracyMetric instance', () {
+      expect(factory.createByType(MetricType.accuracy), isA<AccuracyMetric>());
+    });
+
+    test('should create PrecisionMetric instance', () {
+      expect(factory.createByType(MetricType.precision), isA<PrecisionMetric>());
+    });
+
+    test('should throw an error if null is passed as metric type', () {
+      expect(() => factory.createByType(null), throwsUnsupportedError);
+    });
+  });
+}
diff --git a/test/mocks.dart b/test/mocks.dart
index 18205d28..8f69708a 100644
--- a/test/mocks.dart
+++ b/test/mocks.dart
@@ -1,3 +1,4 @@
+import 'package:ml_algo/src/classifier/classifier.dart';
 import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier.dart';
 import 'package:ml_algo/src/classifier/knn_classifier/knn_classifier_factory.dart';
 import 'package:ml_algo/src/classifier/logistic_regressor/logistic_regressor.dart';
@@ -22,7 +23,10 @@ import 'package:ml_algo/src/linear_optimizer/linear_optimizer_factory.dart';
 import 'package:ml_algo/src/link_function/link_function.dart';
 import 'package:ml_algo/src/math/randomizer/randomizer.dart';
 import 'package:ml_algo/src/math/randomizer/randomizer_factory.dart';
+import 'package:ml_algo/src/metric/metric.dart';
+import 'package:ml_algo/src/metric/metric_factory.dart';
 import 'package:ml_algo/src/model_selection/assessable.dart';
+import 'package:ml_algo/src/model_selection/model_assessor/classifier_assessor.dart';
 import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider.dart';
 import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider_factory.dart';
 import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor.dart';
@@ -43,9 +47,33 @@ import 'package:ml_algo/src/tree_trainer/splitter/numerical_splitter/numerical_s
 import 'package:ml_algo/src/tree_trainer/splitter/splitter.dart';
 import 'package:ml_algo/src/tree_trainer/splitter/splitter_factory.dart';
 import 'package:ml_algo/src/tree_trainer/tree_node/tree_node.dart';
+import 'package:ml_dataframe/ml_dataframe.dart';
+import 'package:ml_linalg/matrix.dart';
+import 'package:ml_preprocessing/ml_preprocessing.dart';
 import 'package:mockito/mockito.dart';
 import 'package:test/test.dart';

+class MetricFactoryMock extends Mock implements MetricFactory {}
+
+class MetricMock extends Mock implements Metric {}
+
+class EncoderFactoryMock extends Mock {
+  Encoder create(DataFrame data, Iterable targetNames);
+}
+
+class FeatureTargetSplitterMock extends Mock {
+  Iterable split(DataFrame samples, {
+    Iterable targetNames,
+    Iterable targetIndices,
+  });
+}
+
+class ClassLabelsNormalizerMock extends Mock {
+  Matrix normalize(Matrix classLabels, num positiveLabel, num negativeLabel);
+}
+
+class EncoderMock extends Mock implements Encoder {}
+
 class RandomizerFactoryMock extends Mock implements RandomizerFactory {}

 class RandomizerMock extends Mock implements Randomizer {}
@@ -151,6 +179,10 @@ class SoftmaxRegressorMock extends Mock implements SoftmaxRegressor {}
 class SoftmaxRegressorFactoryMock extends Mock implements
     SoftmaxRegressorFactory {}

+class ClassifierMock extends Mock implements Classifier {}
+
+class ClassifierAssessorMock extends Mock implements ClassifierAssessor {}
+
 LearningRateGeneratorFactoryMock createLearningRateGeneratorFactoryMock(
     LearningRateGenerator generator) {
   final factory = LearningRateGeneratorFactoryMock();
diff --git a/test/model_selection/cross_validator/cross_validator_impl_test.dart b/test/model_selection/cross_validator/cross_validator_impl_test.dart
index f894c9b8..73dc13cf 100644
--- a/test/model_selection/cross_validator/cross_validator_impl_test.dart
+++ b/test/model_selection/cross_validator/cross_validator_impl_test.dart
@@ -34,20 +34,18 @@ void main() {
       final metric = MetricType.mape;
       final splitter = createSplitter([[0,2,4],[6, 8]]);
       final predictor = AssessableMock();
-      final validator = CrossValidatorImpl(allObservations,
-          ['target'], splitter, DType.float32);
+      final validator = CrossValidatorImpl(allObservations, splitter,
+          DType.float32);
       final score = 20.0;

-      when(predictor.assess(any, any, any)).thenReturn(score);
+      when(predictor.assess(any, any)).thenReturn(score);

-      final actual = await validator
-          .evaluate((observations, outcomes) => predictor, metric);
+      final actual = await validator.evaluate((_) => predictor, metric);

       expect(actual, [20, 20]);

       final verificationResult = verify(
           predictor.assess(
             captureThat(isNotNull),
-            argThat(equals(['target'])),
             metric,
           ));
       final firstAssessCallArgs = verificationResult.captured;
@@ -80,12 +78,12 @@ void main() {
       final metric = MetricType.mape;
       final splitter = createSplitter([[0], [0], [0]]);
       final predictor = AssessableMock();
-      final validator = CrossValidatorImpl(allObservations,
-          ['target'], splitter, DType.float32);
+      final validator = CrossValidatorImpl(allObservations, splitter,
+          DType.float32);

-      when(predictor.assess(any, any, any)).thenReturn(1);
+      when(predictor.assess(any, any)).thenReturn(1);

-      int iterationCounter = 0;
+      var iterationCounter = 0;

       final iterationToResponse = <int, List<DataFrame>>{
         0: [
@@ -113,7 +111,7 @@
       };

       await validator.evaluate(
-        (observations, outcomes) {
+        (observations) {
           expect(
             observations.toMatrix(),
             equals(iterationToResponse[iterationCounter++][0].toMatrix()),
@@ -140,12 +138,12 @@ void main() {
       final metric = MetricType.mape;
       final splitter = createSplitter([[0], [0], [0]]);
       final predictor = AssessableMock();
-      final validator = CrossValidatorImpl(allObservations,
-          ['target'], splitter, DType.float32);
+      final validator = CrossValidatorImpl(allObservations, splitter,
+          DType.float32);

-      when(predictor.assess(any, any, any)).thenReturn(1);
+      when(predictor.assess(any, any)).thenReturn(1);

-      int iterationCounter = 0;
+      var iterationCounter = 0;

       final iterationToResponse = <int, List<DataFrame>>{
         0: [
@@ -172,18 +170,14 @@
       };

       await validator.evaluate(
-        (observations, outcomes) => predictor,
+        (_) => predictor,
         metric,
         onDataSplit: (trainData, testData) =>
-          iterationToResponse[iterationCounter++],
+            iterationToResponse[iterationCounter++],
       );

       final verificationResult = verify(
-          predictor.assess(
-            captureThat(isNotNull),
-            argThat(equals(['target'])),
-            metric,
-          ));
+          predictor.assess(captureThat(isNotNull), metric));
       final firstAssessCallArgs = verificationResult.captured;

       expect((firstAssessCallArgs[0] as DataFrame).rows, equals([
@@ -224,11 +218,11 @@ void main() {
       final splitter = createSplitter([[0], [2], [4]]);
       final predictor = AssessableMock();
       final validator = CrossValidatorImpl(allObservations,
-          ['target'], splitter, DType.float32);
+          splitter, DType.float32);

-      when(predictor.assess(any, any, any)).thenReturn(1);
+      when(predictor.assess(any, any)).thenReturn(1);

-      int iterationCounter = 0;
+      var iterationCounter = 0;

       final iterationToSplits = {
         0: {
@@ -285,7 +279,7 @@
       };

       await validator.evaluate(
-        (observations, outcomes) => predictor,
+        (_) => predictor,
         metric,
         onDataSplit: (trainData, testData) {
           final expectedSplits = iterationToSplits[iterationCounter++];
@@ -325,10 +319,10 @@ void main() {
       final metric = MetricType.mape;
       final splitter = createSplitter([[0], [2], [4]]);
       final predictor = AssessableMock();
-      final validator = CrossValidatorImpl(originalData,
-          ['target'], splitter, DType.float32);
+      final validator = CrossValidatorImpl(originalData, splitter,
+          DType.float32);

-      when(predictor.assess(any, any, any)).thenReturn(1);
+      when(predictor.assess(any, any)).thenReturn(1);

       final transformedTrainData = DataFrame([[1, 2]],
           headerExists: false);
       final transformedTestData = DataFrame([[
           headerExists: false);

       final actual = () => validator.evaluate(
-          (observations, outcomes) => predictor,
-          metric,
-          onDataSplit: (trainData, testData) =>
-              [
-                transformedTrainData,
-                transformedTestData,
-              ],
+        (_) => predictor,
+        metric,
+        onDataSplit: (trainData, testData) =>
+            [
+              transformedTrainData,
+              transformedTestData,
+            ],
       );

       expect(actual, throwsA(isA()));
@@ -369,10 +363,10 @@ void main() {
       final metric = MetricType.mape;
       final splitter = createSplitter([[0], [2], [4]]);
       final predictor = AssessableMock();
-      final validator = CrossValidatorImpl(originalData,
-          ['target'], splitter, DType.float32);
+      final validator = CrossValidatorImpl(originalData, splitter,
+          DType.float32);

-      when(predictor.assess(any, any, any)).thenReturn(1);
+      when(predictor.assess(any, any)).thenReturn(1);

       final transformedTrainData = DataFrame([[1, 2, 3, 4, 5]],
           headerExists: false);
       final transformedTestData = DataFrame([[
           headerExists: false);

       final actual = () => validator.evaluate(
-        (observations, outcomes) => predictor,
+        (_) => predictor,
         metric,
         onDataSplit: (trainData, testData) => [
           transformedTrainData,
@@ -412,10 +406,10 @@ void main() {
       final metric = MetricType.mape;
       final splitter = createSplitter([[0], [2], [4]]);
       final predictor = AssessableMock();
-      final validator = CrossValidatorImpl(originalData,
-          ['target'], splitter, DType.float32);
+      final validator = CrossValidatorImpl(originalData, splitter,
+          DType.float32);

-      when(predictor.assess(any, any, any)).thenReturn(1);
+      when(predictor.assess(any, any)).thenReturn(1);

       final transformedTrainData = DataFrame([[1, 2, 3, 4]],
           headerExists: false);
       final transformedTestData = DataFrame([[
           headerExists: false);

       final actual = () => validator.evaluate(
-        (observations, outcomes) => predictor,
+        (_) => predictor,
         metric,
         onDataSplit: (trainData, testData) => [
           transformedTrainData,
@@ -455,10 +449,10 @@ void main() {
       final metric = MetricType.mape;
       final splitter = createSplitter([[0], [2], [4]]);
       final predictor = AssessableMock();
-      final validator = CrossValidatorImpl(originalData,
-          ['target'], splitter, DType.float32);
+      final validator = CrossValidatorImpl(originalData, splitter,
+          DType.float32);

-      when(predictor.assess(any, any, any)).thenReturn(1);
+      when(predictor.assess(any, any)).thenReturn(1);

       final transformedTrainData = DataFrame([[1, 2, 3, 4]],
           headerExists: false);
       final transformedTestData = DataFrame([[
           headerExists: false);

       final actual = () => validator.evaluate(
-        (observations, outcomes) => predictor,
+        (_) => predictor,
         metric,
         onDataSplit: (trainData, testData) => [
           transformedTrainData,
diff --git a/test/model_selection/cross_validator/cross_validator_test.dart b/test/model_selection/cross_validator/cross_validator_test.dart
index f1af1915..d45a71f1 100644
--- a/test/model_selection/cross_validator/cross_validator_test.dart
+++ b/test/model_selection/cross_validator/cross_validator_test.dart
@@ -1,6 +1,5 @@
-import 'package:injector/injector.dart';
 import 'package:ml_algo/ml_algo.dart';
-import 'package:ml_algo/src/di/injector.dart';
+import 'package:ml_algo/src/model_selection/_injector.dart';
 import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider.dart';
 import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider_factory.dart';
 import 'package:ml_algo/src/model_selection/split_indices_provider/split_indices_provider_type.dart';
@@ -25,15 +24,14 @@ void main() {
       dataSplitter = DataSplitterMock();
       dataSplitterFactory = createDataSplitterFactoryMock(dataSplitter);

-      injector = Injector()
+      modelSelectionInjector
+        ..clearAll()
         ..registerDependency(() => dataSplitterFactory);
     });

-    tearDown(() => injector = null);
-
     test('should create k-fold cross validator and pass number of folds '
         'parameter into data splitter factory', () {
-      CrossValidator.kFold(data, ['4'], numberOfFolds: 10);
+      CrossValidator.kFold(data, numberOfFolds: 10);

       verify(dataSplitterFactory
           .createByType(SplitIndicesProviderType.kFold, numberOfFolds: 10),
@@ -42,7 +40,7 @@ void main() {

     test('should create k-fold cross validator and pass 5 as default value for '
         'number of folds parameter into data splitter factory', () {
-      CrossValidator.kFold(data, ['4']);
+      CrossValidator.kFold(data);

       verify(dataSplitterFactory
           .createByType(SplitIndicesProviderType.kFold, numberOfFolds: 5),
@@ -51,7 +49,7 @@ void main() {

     test('should create leave-p-out cross validator and pass `p` parameter '
         'into data splitter factory', () {
-      CrossValidator.lpo(data, ['4'], 30);
+      CrossValidator.lpo(data, 30);

       verify(dataSplitterFactory
           .createByType(SplitIndicesProviderType.lpo, p: 30),
diff --git a/test/model_selection/model_assessor/classifier_assessor_test.dart b/test/model_selection/model_assessor/classifier_assessor_test.dart
new file mode 100644
index 00000000..07ff3e19
--- /dev/null
+++ b/test/model_selection/model_assessor/classifier_assessor_test.dart
@@ -0,0 +1,231 @@
+import 'dart:math' as math;
+
+import 'package:ml_algo/src/common/exception/invalid_metric_type_exception.dart';
+import 'package:ml_algo/src/metric/metric_type.dart';
+import 'package:ml_algo/src/model_selection/model_assessor/classifier_assessor.dart';
+import 'package:ml_dataframe/ml_dataframe.dart';
+import 'package:ml_linalg/dtype.dart';
+import 'package:mockito/mockito.dart';
+import 'package:test/test.dart';
+
+import '../../mocks.dart';
+
+void main() {
+  group('ClassifierAssessor', () {
+    final generator = math.Random();
+    final metricFactoryMock = MetricFactoryMock();
+    final metricMock = MetricMock();
+    final encoderFactoryMock = EncoderFactoryMock();
+    final encoderMock = EncoderMock();
+    final featureTargetSplitterMock = FeatureTargetSplitterMock();
+    final classLabelsNormalizerMock = ClassLabelsNormalizerMock();
+    final assessor = ClassifierAssessor(
+      metricFactoryMock,
+      encoderFactoryMock.create,
+      featureTargetSplitterMock.split,
+      classLabelsNormalizerMock.normalize,
+    );
+    final metricType = MetricType.precision;
+    final classifierMock = ClassifierMock();
+    final featuresNames = ['feature_1', 'feature_2', 'feature_3'];
+    final targetNames = ['target_1', 'target_2', 'target_2'];
+    final samplesHeader = [...featuresNames, ...targetNames];
+    final samples = DataFrame([
+      [     1,  33, -199,   1, 0, 0],
+      [-90002, 232, 889.20, 1, 0, 0],
+      [-12004,  19, 111,    0, 1, 0],
+    ], headerExists: false, header: samplesHeader);
+    final featuresMock = DataFrame([
+      [     1,  33, -199  ],
+      [-90002, 232, 889.20],
+      [-12004,  19, 111   ],
+    ], headerExists: false, header: featuresNames);
+    final targetMock = DataFrame([
+      [1, 0, 0],
+      [1, 0, 0],
+      [0, 1, 0],
+    ], headerExists: false, header: targetNames);
+    final predictionMock = DataFrame([
+      [0, 0, 1],
+      [0, 0, 1],
+      [1, 0, 0],
+    ], headerExists: false, header: targetNames);
+    final dtype = DType.float64;
+    final positiveLabel = 100;
+    final negativeLabel = -100;
+
+    setUp(() {
+      when(
+        encoderFactoryMock.create(
+          argThat(anything),
+          argThat(anything),
+        )
+      ).thenReturn(encoderMock);
+      when(classifierMock.dtype).thenReturn(dtype);
+      when(
+        classifierMock.targetNames,
+      ).thenReturn(targetNames);
+      when(
+        classifierMock.dtype,
+      ).thenReturn(DType.float64);
+      when(
+        featureTargetSplitterMock.split(
+          argThat(anything),
+          targetNames: anyNamed('targetNames'),
+        )
+      ).thenReturn([featuresMock, targetMock]);
+      when(
+        classifierMock.predict(
+          argThat(anything),
+        ),
+      ).thenReturn(predictionMock);
+      when(
+        encoderMock.process(
+          argThat(anything),
+        ),
+      ).thenReturn(predictionMock);
+      when(
+        metricFactoryMock.createByType(
+          argThat(anything),
+        ),
+      ).thenReturn(metricMock);
+    });
+
+    tearDown(() {
+      reset(metricFactoryMock);
+      reset(metricMock);
+      reset(encoderFactoryMock);
+      reset(encoderMock);
+      reset(featureTargetSplitterMock);
+      reset(classLabelsNormalizerMock);
+    });
+
+    test('should throw an exception if improper metric type is provided', () {
+      final metricTypes = [MetricType.mape, MetricType.rmse];
+
+      metricTypes.forEach((metricType) {
+        final actual = () => assessor.assess(classifierMock, metricType, samples);
+
+        expect(actual, throwsA(isA<InvalidMetricTypeException>()));
+      });
+    });
+
+    test('should create metric entity', () {
+      assessor.assess(classifierMock, metricType, samples);
+
+      verify(metricFactoryMock.createByType(metricType)).called(1);
+    });
+
+    test('should encode predicted target column if it is not encoded', () {
+      when(classifierMock.targetNames).thenReturn(['target']);
+
+      assessor.assess(classifierMock, metricType, samples);
+
+      verify(encoderMock.process(predictionMock)).called(1);
+    });
+
+    test('should encode original target column if it is not encoded', () {
+      when(classifierMock.targetNames).thenReturn(['target']);
+
+      assessor.assess(classifierMock, metricType, samples);
+
+      verify(encoderMock.process(targetMock)).called(1);
+    });
+
+    test('should normalize predicted class labels if predefined labels for '
+        'positive and negative classes exist', () {
+      when(classifierMock.positiveLabel).thenReturn(positiveLabel);
+      when(classifierMock.negativeLabel).thenReturn(negativeLabel);
+
+      assessor.assess(classifierMock, metricType, samples);
+
+      verify(
+        classLabelsNormalizerMock.normalize(
+          predictionMock.toMatrix(dtype), positiveLabel, negativeLabel,
+        ),
+      ).called(1);
+    });
+
+    test('should not normalize predicted class labels if predefined labels for '
+        'positive and negative classes do not exist', () {
+      when(classifierMock.positiveLabel).thenReturn(null);
+      when(classifierMock.negativeLabel).thenReturn(null);
+
+      assessor.assess(classifierMock, metricType, samples);
+
+      verifyNever(
+        classLabelsNormalizerMock.normalize(
+          predictionMock.toMatrix(dtype), positiveLabel, negativeLabel,
+        ),
+      );
+    });
+
+    test('should not normalize predicted class labels if at least one '
+        'predefined class label does not exist', () {
+      when(classifierMock.positiveLabel).thenReturn(positiveLabel);
+      when(classifierMock.negativeLabel).thenReturn(null);
+
+      assessor.assess(classifierMock, metricType, samples);
+
+      verifyNever(
+        classLabelsNormalizerMock.normalize(
+          predictionMock.toMatrix(dtype), positiveLabel, negativeLabel,
+        ),
+      );
+    });
+
+    test('should normalize original class labels if predefined labels for '
+        'positive and negative classes exist', () {
+      when(classifierMock.positiveLabel).thenReturn(positiveLabel);
+      when(classifierMock.negativeLabel).thenReturn(negativeLabel);
+
+      assessor.assess(classifierMock, metricType, samples);
+
+      verify(
+        classLabelsNormalizerMock.normalize(
+          targetMock.toMatrix(dtype), positiveLabel, negativeLabel,
+        ),
+      ).called(1);
+    });
+
+    test('should not normalize original class labels if predefined labels for '
+        'positive and negative classes do not exist', () {
+      when(classifierMock.positiveLabel).thenReturn(null);
+      when(classifierMock.negativeLabel).thenReturn(null);
+
+      assessor.assess(classifierMock, metricType, samples);
+
+      verifyNever(
+        classLabelsNormalizerMock.normalize(
+          targetMock.toMatrix(dtype), positiveLabel, negativeLabel,
+        ),
+      );
+    });
+
+    test('should not normalize original class labels if at least one '
+        'predefined class label does not exist', () {
+      when(classifierMock.positiveLabel).thenReturn(null);
+      when(classifierMock.negativeLabel).thenReturn(negativeLabel);
+
+      assessor.assess(classifierMock, metricType, samples);
+
+      verifyNever(
+        classLabelsNormalizerMock.normalize(
+          targetMock.toMatrix(dtype), positiveLabel, negativeLabel,
+        ),
+      );
+    });
+
+    test('should return score', () {
+      final score = generator.nextDouble();
+
+      when(
+        metricMock.getScore(argThat(anything), argThat(anything)),
+      ).thenReturn(score);
+
+      final actual = assessor.assess(classifierMock, metricType, samples);
+
+      expect(actual, equals(score));
+    });
+  });
+}
diff --git a/test/regressor/knn_regressor/knn_regressor_impl_test.dart b/test/regressor/knn_regressor/knn_regressor_impl_test.dart
index 673b98de..ece38ca6 100644
--- a/test/regressor/knn_regressor/knn_regressor_impl_test.dart
+++ b/test/regressor/knn_regressor/knn_regressor_impl_test.dart
@@ -1,4 +1,7 @@
+import 'package:ml_algo/src/di/injector.dart';
 import 'package:ml_algo/src/knn_solver/neigbour.dart';
+import 'package:ml_algo/src/metric/metric_type.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/_injector.dart';
 import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_impl.dart';
 import 'package:ml_dataframe/ml_dataframe.dart';
 import 'package:ml_linalg/dtype.dart';
@@ -20,6 +23,9 @@ void main() {
     tearDown(() {
       reset(solver);
       reset(kernel);
+
+      injector.clearAll();
+      knnRegressorInjector.clearAll();
     });

     test('should throw an exception if no features are provided', () {
diff --git a/test/regressor/knn_regressor/knn_regressor_integration_test.dart b/test/regressor/knn_regressor/knn_regressor_integration_test.dart
index 0da8efc7..527e1607 100644
--- a/test/regressor/knn_regressor/knn_regressor_integration_test.dart
+++ b/test/regressor/knn_regressor/knn_regressor_integration_test.dart
@@ -1,4 +1,5 @@
 import 'package:ml_algo/ml_algo.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/_helpers/create_knn_regressor.dart';
 import 'package:ml_dataframe/ml_dataframe.dart';
 import 'package:ml_linalg/distance.dart';
 import 'package:ml_linalg/matrix.dart';
@@ -60,8 +61,11 @@ void main() {
       [9.0, 9.0, 9.0, 9.0, 9.0],
     ]);

-    final regressor = KnnRegressor(data, 'target', k,
-        kernel: KernelType.epanechnikov);
+    final regressor = createKnnRegressor(
+        fittingData: data,
+        targetName: 'target',
+        k: k,
+        kernel: KernelType.epanechnikov);

     final actual = regressor.predict(
       DataFrame.fromMatrix(testFeatures),
diff --git a/test/regressor/knn_regressor/knn_regressor_test.dart b/test/regressor/knn_regressor/knn_regressor_test.dart
index 0a4bf536..a1de01e6 100644
--- a/test/regressor/knn_regressor/knn_regressor_test.dart
+++ b/test/regressor/knn_regressor/knn_regressor_test.dart
@@ -1,7 +1,6 @@
-import 'package:injector/injector.dart';
-import 'package:ml_algo/src/di/injector.dart';
 import 'package:ml_algo/src/knn_kernel/kernel_type.dart';
-import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/_helpers/create_knn_regressor.dart';
+import 'package:ml_algo/src/regressor/knn_regressor/_injector.dart';
 import 'package:ml_algo/src/regressor/knn_regressor/knn_regressor_factory.dart';
 import 'package:ml_dataframe/ml_dataframe.dart';
 import 'package:ml_linalg/distance.dart';
@@ -29,17 +28,16 @@ void main() {
     final knnRegressor = KnnRegressorMock();
     final knnRegressorFactory = createKnnRegressorFactoryMock(knnRegressor);

-    setUp(() => injector = Injector()
+    setUp(() => knnRegressorInjector
+      ..clearAll()
       ..registerSingleton(() => knnRegressorFactory),
     );

-    tearDown(() => injector = null);
-
     test('should call KnnRegressorFactory in order to create a regressor', () {
-      final regressor = KnnRegressor(
-        data,
-        targetName,
-        2,
+      final regressor = createKnnRegressor(
+        fittingData: data,
+        targetName: targetName,
+        k: 2,
         kernel: KernelType.epanechnikov,
         distance: Distance.cosine,
         dtype: DType.float64,
diff --git a/test/regressor/linear_regressor_integration_test.dart b/test/regressor/linear_regressor_integration_test.dart
index 0cd2d4ea..03a06931 100644
--- a/test/regressor/linear_regressor_integration_test.dart
+++ b/test/regressor/linear_regressor_integration_test.dart
@@ -1,5 +1,6 @@
 import 'dart:io';

+import 'package:ml_algo/src/di/injector.dart';
 import 'package:ml_algo/src/regressor/linear_regressor/linear_regressor.dart';
 import 'package:ml_algo/src/regressor/linear_regressor/linear_regressor_json_keys.dart';
 import 'package:ml_dataframe/ml_dataframe.dart';
@@ -43,6 +44,8 @@ void main() {
       if (await file.exists()) {
         await file.delete();
       }
+
+      injector.clearAll();
     });

     test('should serialize', () {
diff --git a/test/regressor/linear_regressor_test.dart b/test/regressor/linear_regressor_test.dart
index efe7238b..21d145b7 100644
--- a/test/regressor/linear_regressor_test.dart
+++ b/test/regressor/linear_regressor_test.dart
@@ -1,4 +1,3 @@
-import 'package:injector/injector.dart';
 import 'package:ml_algo/ml_algo.dart';
 import 'package:ml_algo/src/cost_function/cost_function.dart';
 import 'package:ml_algo/src/cost_function/cost_function_factory.dart';
@@ -9,6 +8,7 @@ import 'package:ml_algo/src/linear_optimizer/linear_optimizer.dart';
 import 'package:ml_algo/src/linear_optimizer/linear_optimizer_factory.dart';
 import 'package:ml_algo/src/linear_optimizer/linear_optimizer_type.dart';
 import 'package:ml_algo/src/linear_optimizer/regularization_type.dart';
+import 'package:ml_algo/src/regressor/linear_regressor/_helpers/create_linear_regressor.dart';
 import 'package:ml_dataframe/ml_dataframe.dart';
 import 'package:ml_linalg/dtype.dart';
 import 'package:ml_linalg/linalg.dart';
@@ -46,7 +46,7 @@ void main() {
     linearOptimizerFactoryMock = createLinearOptimizerFactoryMock(
         linearOptimizerMock);

-    injector = Injector()
+    injector
       ..registerDependency(
               () => costFunctionFactoryMock)
       ..registerDependency(
               () =>
@@ -60,19 +60,25 @@ void main() {
     when(linearOptimizerMock.costPerIteration).thenReturn(costPerIteration);
   });

-  tearDownAll(() => injector = null);
+  tearDown(injector.clearAll);

   test('should throw an error if the target column does not exist', () {
     final targetColumn = 'absent_column';

     expect(
-      () => LinearRegressor(observations, targetColumn),
+      () => createLinearRegressor(
+        fittingData: observations,
+        targetName: targetColumn,
+      ),
       throwsException,
     );
   });

   test('should call cost function factory in order to create '
       'squared cost function instance', () {
-    LinearRegressor(observations, 'target');
+    createLinearRegressor(
+      fittingData: observations,
+      targetName: 'target',
+    );

     verify(costFunctionFactoryMock.createByType(
       CostFunctionType.leastSquare,
@@ -81,7 +87,9 @@ void main() {
   test('should call linear optimizer factory and consider intercept term '
       'while calling the factory', () {
-    LinearRegressor(observations, 'target',
+    createLinearRegressor(
+      fittingData: observations,
+      targetName: 'target',
       optimizerType: LinearOptimizerType.coordinate,
       iterationsLimit: 1000,
       initialLearningRate: 5,
@@ -125,7 +133,9 @@ void main() {
   test('should find the extrema for fitting observations while '
       'instantiating', () {
-    LinearRegressor(observations, 'target',
+    createLinearRegressor(
+      fittingData: observations,
+      targetName: 'target',
       initialCoefficients: initialCoefficients,
     );

@@ -136,9 +146,9 @@ void main() {
   });

   test('should predict values basing on learned coefficients', () {
-    final predictor = LinearRegressor(
-      observations,
-      'target',
+    final predictor = createLinearRegressor(
+      fittingData: observations,
+      targetName: 'target',
       initialCoefficients: initialCoefficients,
       fitIntercept: true,
       interceptScale: 2.0,
@@ -164,9 +174,9 @@ void main() {
   test('should collect cost values per iteration if collectLearningData is '
       'true', () {
-    final regressor = LinearRegressor(
-      observations,
-      'target',
+    final regressor = createLinearRegressor(
+      fittingData: observations,
+      targetName: 'target',
       initialCoefficients: initialCoefficients,
       collectLearningData: true,
     );
@@ -181,9 +191,9 @@ void main() {
   test('should not collect cost values per iteration if collectLearningData is '
      'false', () {
-    LinearRegressor(
-      observations,
-      'target',
+    createLinearRegressor(
+      fittingData: observations,
+      targetName: 'target',
       initialCoefficients: initialCoefficients,
       collectLearningData: false,
     );
diff --git a/test/tree_trainer/tree_trainer_integration_test.dart b/test/tree_trainer/tree_trainer_integration_test.dart
index f9842e03..7832e4c6 100644
--- a/test/tree_trainer/tree_trainer_integration_test.dart
+++ b/test/tree_trainer/tree_trainer_integration_test.dart
@@ -1,3 +1,5 @@
+import 'package:ml_algo/src/classifier/decision_tree_classifier/_init_module.dart';
+import 'package:ml_algo/src/di/injector.dart';
 import 'package:ml_algo/src/tree_trainer/_helpers/create_decision_tree_trainer.dart';
 import 'package:ml_linalg/dtype.dart';
 import 'package:test/test.dart';
@@ -8,6 +10,10 @@ import '../majority_tree_data_mock.dart';
 void main() {
   group('TreeTrainer', () {
     group('DecisionTreeTrainer', () {
+      setUp(initDecisionTreeModule);
+
+      tearDown(injector.clearAll);
+
       test('should build a decision tree', () {
         final targetName = 'col_8';
         final minErrorOnNode = 0.3;
diff --git a/tool/grind.dart b/tool/grind.dart
index 79aba4ee..4587aa22 100755
--- a/tool/grind.dart
+++ b/tool/grind.dart
@@ -1,12 +1,15 @@
 import 'package:grinder/grinder.dart';
 import 'package:ml_tech/ml_tech.dart' as ml_tech;

+import 'run_e2e_tests.dart';
+
 Future main(List args) => grind(args);

 @Task()
 Future start() async {
   ml_tech.analyze();
   await ml_tech.test();
+  await runE2ETests();
 }

 @Task()
diff --git a/tool/run_e2e_tests.dart b/tool/run_e2e_tests.dart
new file mode 100644
index 00000000..3cb78ecc
--- /dev/null
+++ b/tool/run_e2e_tests.dart
@@ -0,0 +1,16 @@
+import 'package:process_run/shell.dart';
+
+Future runE2ETests() => Shell().run(
+'''
+
+echo Running e2e tests...
+
+pub run test e2e -p vm
+
+echo e2e tests finished
+
+''');
+
+void main() async {
+  await runE2ETests();
+}
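
Note (not part of the diff): a minimal, hypothetical sketch of how the reworked `CrossValidator` API exercised by the tests above might be used. Only `CrossValidator.kFold(data, numberOfFolds: ...)`, the single-argument model factory passed to `evaluate`, and the awaitable per-fold result are taken from the changes shown here; the dataset, column names, and the `KnnRegressor`-based model are placeholders and may not match the library's exact public signatures.

```dart
import 'package:ml_algo/ml_algo.dart';
import 'package:ml_dataframe/ml_dataframe.dart';

Future<void> main() async {
  // Hypothetical samples; the last column ('target') is treated as the outcome.
  final samples = DataFrame([
    ['feature_1', 'feature_2', 'target'],
    [10.0, 20.0, 1.0],
    [11.0, 19.0, 2.0],
    [30.0, 40.0, 3.0],
    [35.0, 42.0, 4.0],
    [50.0, 60.0, 5.0],
  ]);

  // Target names are no longer passed to the validator itself.
  final validator = CrossValidator.kFold(samples, numberOfFolds: 5);

  // evaluate() now takes a one-argument model factory (train samples only)
  // and completes asynchronously with one score per fold.
  final scores = await validator.evaluate(
    // Any assessable model would do here; this particular constructor call
    // is an assumption for illustration only.
    (trainSamples) => KnnRegressor(trainSamples, 'target', 2),
    MetricType.mape,
  );

  print(scores);
}
```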