From becd58f365ddcf9e54422f7d962bd0c13073a88f Mon Sep 17 00:00:00 2001
From: Yael Dekel
Date: Sat, 1 Dec 2018 15:17:49 -0800
Subject: [PATCH] Calibrator trainer needs to clear data so it can train
 another calibrator with new data

---
 .../Prediction/Calibrator.cs                  |   4 +-
 .../Common/OVA/OVA-CV-iris-out.txt            |  58 +++++++
 .../Common/OVA/OVA-CV-iris-rp.txt             |   4 +
 .../BaselineOutput/Common/OVA/OVA-CV-iris.txt | 151 ++++++++++++++++++
 .../Common/OVA/OVA-FastForest-CV-iris-out.txt | 100 ++++++++++++
 .../Common/OVA/OVA-FastForest-CV-iris-rp.txt  |   4 +
 .../Common/OVA/OVA-FastForest-CV-iris.txt     | 151 ++++++++++++++++++
 .../OVA/OVA-FastForest-TrainTest-iris-out.txt |  57 +++++++
 .../OVA/OVA-FastForest-TrainTest-iris-rp.txt  |   4 +
 .../OVA/OVA-FastForest-TrainTest-iris.txt     | 151 ++++++++++++++++++
 .../Common/OVA/OVA-TrainTest-iris-out.txt     |  36 +++++
 .../Common/OVA/OVA-TrainTest-iris-rp.txt      |   4 +
 .../Common/OVA/OVA-TrainTest-iris.txt         | 151 ++++++++++++++++++
 .../Common/PKPD/PKPD-CV-iris-out.txt          |  70 ++++++++
 .../Common/PKPD/PKPD-CV-iris-rp.txt           |   4 +
 .../Common/PKPD/PKPD-CV-iris.txt              | 151 ++++++++++++++++++
 .../Common/PKPD/PKPD-TrainTest-iris-out.txt   |  42 +++++
 .../Common/PKPD/PKPD-TrainTest-iris-rp.txt    |   4 +
 .../Common/PKPD/PKPD-TrainTest-iris.txt       | 151 ++++++++++++++++++
 .../TestPredictors.cs                         |  11 ++
 test/Microsoft.ML.TestFramework/Learners.cs   |  19 +++
 21 files changed, 1326 insertions(+), 1 deletion(-)
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-CV-iris.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-out.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-rp.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-out.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-rp.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt
 create mode 100644 test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt
 create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt
 create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt
 create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt
 create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt
 create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt
 create mode 100644 test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt

diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs
index 0e966a44dc..a44cf64753 100644
--- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs
+++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs
@@ -1112,7 +1112,9 @@ public bool ProcessTrainingExample(Float output, bool labelIs1, Float weight)
         public ICalibrator FinishTraining(IChannel ch)
         {
             ch.Check(Data != null, "Calibrator trained on zero instances.");
-            return CreateCalibrator(ch);
+            var calibrator = CreateCalibrator(ch);
+            Data = null;
+            return calibrator;
         }
 
         public abstract ICalibrator CreateCalibrator(IChannel ch);
diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt
new file mode 100644
index 0000000000..21874465e6 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris-out.txt @@ -0,0 +1,58 @@ +maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 +Not adding a normalizer. +Training learner 0 +Training calibrator. +Training learner 1 +Training calibrator. +Training learner 2 +Training calibrator. +Not training a calibrator because it is not needed. +Not adding a normalizer. +Training learner 0 +Training calibrator. +Training learner 1 +Training calibrator. +Training learner 2 +Training calibrator. +Not training a calibrator because it is not needed. + +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 21 | 0 | 0 | 1.0000 + 1 || 0 | 22 | 8 | 0.7333 + 2 || 0 | 0 | 28 | 1.0000 + ||======================== +Precision ||1.0000 |1.0000 |0.7778 | +Accuracy(micro-avg): 0.898734 +Accuracy(macro-avg): 0.911111 +Log-loss: 0.372620 +Log-loss reduction: 65.736556 + +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 29 | 0 | 0 | 1.0000 + 1 || 0 | 18 | 2 | 0.9000 + 2 || 0 | 0 | 22 | 1.0000 + ||======================== +Precision ||1.0000 |1.0000 |0.9167 | +Accuracy(micro-avg): 0.971831 +Accuracy(macro-avg): 0.966667 +Log-loss: 0.357704 +Log-loss reduction: 67.051654 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.935283 (0.0365) +Accuracy(macro-avg): 0.938889 (0.0278) +Log-loss: 0.365162 (0.0075) +Log-loss reduction: 66.394105 (0.6575) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt new file mode 100644 index 0000000000..7f9d1ab2be --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris-rp.txt @@ -0,0 +1,4 @@ +OVA +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.935283 0.938889 0.365162 66.3941 AvgPer{lr=0.8} OVA %Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=AvgPer{ lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8} + diff --git a/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt b/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt new file mode 100644 index 0000000000..ea773d1bba --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-CV-iris.txt @@ -0,0 +1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +5 0 0 0.043587643807703136 0.957348645 0.04264102 1.03425764E-05 0 1 2 +6 0 0 0.20844569128859777 0.8118451 0.188126311 2.8563165E-05 0 1 2 +8 0 0 0.44491771498326443 0.640877 0.359043151 7.987263E-05 0 1 2 +9 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2 +10 0 0 0.064111239185181926 0.937900662 0.0620922744 7.04143076E-06 0 1 2 +11 0 0 0.19511668953898065 0.822738647 0.17722775 3.361244E-05 0 1 2 +18 0 0 0.040957067767296483 0.959870338 0.0401218459 7.82396E-06 0 1 2 +20 0 0 0.12310363986545093 0.884172 0.115805365 2.26346256E-05 0 1 2 +21 0 0 0.080695089616231355 0.9224749 0.07751174 1.33279436E-05 0 1 2 +25 0 0 0.30682306393325992 0.7357808 0.2641595 5.97413928E-05 0 1 2 +28 0 0 0.13141817305409223 0.876851 0.12313617 1.279574E-05 0 1 2 +31 0 0 0.10895984751128654 0.8967664 0.103215657 1.78892569E-05 0 1 2 +32 0 0 
0.035477802883361699 0.965144157 0.0348526426 3.21791072E-06 0 1 2 +35 0 0 0.20274726386806977 0.8164846 0.183501333 1.40994789E-05 0 1 2 +37 0 0 0.28103366767537485 0.7550029 0.244961023 3.60610429E-05 0 1 2 +40 0 0 0.12298365201239185 0.8842781 0.115710318 1.158336E-05 0 1 2 +41 0 0 0.63401679194266458 0.5304568 0.4693291 0.000214140542 0 1 2 +44 0 0 0.077454598775344219 0.925469041 0.07449977 3.11931653E-05 0 1 2 +45 0 0 0.3215830979624606 0.7250004 0.274949133 5.0463128E-05 0 1 2 +46 0 0 0.072538640149662562 0.9300298 0.06995974 1.0462416E-05 0 1 2 +48 0 0 0.070505947028000213 0.9319222 0.0680698752 7.905172E-06 0 1 2 +50 1 1 0.97585559443809855 0.376869768 0.358608037 0.264522225 1 2 0 +51 1 1 0.820648723050456 0.440146029 0.3583106 0.201543346 1 2 0 +52 1 2 1.0835275133336952 0.5653485 0.3383997 0.0962518156 2 1 0 +54 1 2 0.75898112148691677 0.472428739 0.468143165 0.0594281144 2 1 0 +56 1 2 1.0174162545586878 0.5111817 0.36152783 0.127290443 2 1 0 +60 1 1 0.30253484094402477 0.738942742 0.209481522 0.051575724 1 2 0 +63 1 2 0.77949402405350943 0.499299049 0.458638 0.04206293 2 1 0 +64 1 1 0.45505022537231249 0.6344161 0.288069278 0.0775146 1 0 2 +66 1 1 0.7154835565078782 0.488955617 0.46298942 0.04805498 1 2 0 +68 1 1 0.68322766519277334 0.504984438 0.482306838 0.0127087329 1 2 0 +69 1 1 0.31084089328775633 0.732830465 0.183353335 0.08381622 1 2 0 +70 1 2 1.1682944017762613 0.6530067 0.310896754 0.0360965841 2 1 0 +71 1 1 0.43377030209255479 0.6480611 0.185485169 0.166453749 1 0 2 +72 1 2 0.88766165771254424 0.578003168 0.41161713 0.0103796953 2 1 0 +73 1 1 0.66368933400718488 0.514948 0.4451455 0.03990646 1 2 0 +74 1 1 0.54404239638263385 0.5803973 0.245238408 0.174364269 1 2 0 +76 1 2 0.84677727980192752 0.5165659 0.4287946 0.0546395145 2 1 0 +77 1 2 1.2789386167391619 0.688494444 0.278332561 0.0331729874 2 1 0 +79 1 1 0.34033011681215469 0.7115354 0.2354252 0.0530394167 1 0 2 +82 1 1 0.35118632841443026 0.7038526 0.15209128 0.144056112 1 2 0 +88 1 1 0.4571578145475656 0.6330804 0.217808262 0.149111286 1 2 0 +90 1 1 0.54303381152243435 0.580983 0.390706122 0.0283109024 1 2 0 +91 1 1 0.7255881783753686 0.484039783 0.444033325 0.0719268844 1 2 0 +92 1 1 0.35286862388238727 0.7026695 0.20336625 0.09396427 1 2 0 +93 1 1 0.24150358472221847 0.785446 0.122569308 0.0919846743 1 0 2 +95 1 1 0.45747345580807686 0.6328806 0.223053813 0.144065529 1 2 0 +96 1 1 0.46692162584127184 0.6269292 0.2688824 0.1041884 1 2 0 +97 1 1 0.52181706235134551 0.593441248 0.265519917 0.14103885 1 2 0 +98 1 1 0.33964763199167614 0.7120212 0.255649149 0.03232969 1 0 2 +99 1 1 0.42298578084071409 0.655087948 0.2430654 0.10184665 1 2 0 +100 2 2 0.13591733259440952 0.8729148 0.125132382 0.00195282849 2 1 0 +102 2 2 0.13809510857610402 0.871015847 0.125785753 0.00319840666 2 1 0 +104 2 2 0.19932133588014422 0.8192866 0.178226635 0.00248679356 2 1 0 +105 2 2 0.09978434131070596 0.9050326 0.09390649 0.00106095837 2 1 0 +106 2 2 0.65516062299283195 0.519358635 0.4732639 0.00737748668 2 1 0 +108 2 2 0.36038464423836569 0.697408 0.300992548 0.00159944966 2 1 0 +109 2 2 0.042800052177573163 0.958102942 0.03757144 0.00432561943 2 1 0 +111 2 2 0.33893424257144178 0.7125293 0.282670647 0.004800048 2 1 0 +112 2 2 0.17819193567707683 0.8367818 0.156975582 0.006242614 2 1 0 +113 2 2 0.49781014911918742 0.6078603 0.388630718 0.00350892986 2 1 0 +115 2 2 0.1683952699484349 0.845019758 0.146008313 0.008971939 2 1 0 +117 2 2 0.023365514010699712 0.976905346 0.02015102 0.00294363522 2 1 0 +120 2 2 0.11133724227002473 0.894637 
0.100207157 0.005155826 2 1 0 +121 2 2 0.43666882240878063 0.6461854 0.346793234 0.007021348 2 1 0 +122 2 2 0.13629671282280101 0.8725837 0.126684025 0.000732263 2 1 0 +123 2 2 0.4310483662194341 0.6498275 0.338038325 0.0121342046 2 1 0 +125 2 2 0.11330052370098145 0.8928823 0.101871319 0.0052463985 2 1 0 +128 2 2 0.27949760674881013 0.756163538 0.2411889 0.00264759478 2 1 0 +129 2 2 0.17530740569786113 0.839199 0.153847516 0.006953467 2 1 0 +131 2 2 0.031839393778411017 0.968662143 0.0223613773 0.008976495 2 1 0 +132 2 2 0.27137481365798816 0.7623307 0.235219285 0.00245000049 2 1 0 +133 2 2 0.43700297433440277 0.6459695 0.341389537 0.01264096 2 1 0 +137 2 2 0.23063259534491895 0.794031143 0.198468238 0.00750062242 2 1 0 +138 2 2 0.43845130281190237 0.6450346 0.3309319 0.0240335166 2 1 0 +141 2 2 0.17626166414917829 0.8383986 0.142890155 0.018711295 2 1 0 +144 2 2 0.099717233123952864 0.9050933 0.09041383 0.00449282629 2 1 0 +145 2 2 0.18787613378173548 0.828717351 0.161682889 0.009599784 2 1 0 +147 2 2 0.24798062433245444 0.780375063 0.20853655 0.01108838 2 1 0 +0 0 0 0.34881132522048625 0.705526233 0.294473559 1.92144441E-07 0 1 2 +1 0 0 0.36141580969752651 0.696689248 0.303309947 7.8389877E-07 0 1 2 +2 0 0 0.35568660847624228 0.7006922 0.299307227 5.929496E-07 0 1 2 +3 0 0 0.36470718348091719 0.694399953 0.30559817 1.84990029E-06 0 1 2 +4 0 0 0.34775770739677259 0.70627 0.293729782 2.00147142E-07 0 1 2 +7 0 0 0.35382023048081196 0.702001154 0.297998428 4.17606344E-07 0 1 2 +12 0 0 0.36098727532383801 0.696987867 0.303011417 7.32556146E-07 0 1 2 +13 0 0 0.35788558263546733 0.699153066 0.300846159 7.88259E-07 0 1 2 +14 0 0 0.33437356737542145 0.715786338 0.284213632 7.41558059E-09 0 1 2 +15 0 0 0.33259729807630167 0.7170589 0.2829411 2.302074E-08 0 1 2 +16 0 0 0.33963038748907248 0.712033451 0.2879665 5.7538923E-08 0 1 2 +17 0 0 0.34952968472792562 0.7050196 0.294980139 2.583559E-07 0 1 2 +19 0 0 0.34579385759256209 0.70765835 0.292341441 2.096021E-07 0 1 2 +22 0 0 0.34605819997965914 0.7074713 0.29252857 1.20912986E-07 0 1 2 +23 0 0 0.36811690986051288 0.6920363 0.307961673 2.02298725E-06 0 1 2 +24 0 0 0.37119922981165249 0.6899065 0.310090721 2.80658514E-06 0 1 2 +26 0 0 0.35941763729273518 0.698082745 0.301916152 1.08453048E-06 0 1 2 +27 0 0 0.35009366263991337 0.7046221 0.295377672 2.18394433E-07 0 1 2 +29 0 0 0.36473963008629695 0.6943774 0.305620819 1.74348168E-06 0 1 2 +30 0 0 0.36694890288891646 0.692845047 0.307153255 1.67791779E-06 0 1 2 +33 0 0 0.33532989874849606 0.715102136 0.284897834 2.06016182E-08 0 1 2 +34 0 0 0.36074853902438908 0.6971543 0.302845 6.899852E-07 0 1 2 +36 0 0 0.3442039710581144 0.708784342 0.2912156 5.37456621E-08 0 1 2 +38 0 0 0.36249420192434484 0.695938349 0.304059923 1.72375007E-06 0 1 2 +39 0 0 0.35302378267720547 0.7025605 0.2974392 3.31980715E-07 0 1 2 +42 0 0 0.35802404250832931 0.699056268 0.30094254 1.18013247E-06 0 1 2 +43 0 0 0.35964093968844252 0.6979269 0.3020715 1.62606943E-06 0 1 2 +47 0 0 0.35894549345005311 0.6984124 0.301586539 1.06725963E-06 0 1 2 +49 0 0 0.35354270878931848 0.702196 0.2978036 3.52685419E-07 0 1 2 +53 1 1 0.43814530313713385 0.645232 0.30703035 0.0477376431 1 2 0 +55 1 1 0.52991693789558192 0.588653862 0.3812489 0.0300972275 1 2 0 +57 1 1 0.3144098829942828 0.730219662 0.222309768 0.047470592 1 0 2 +58 1 1 0.16338480577647163 0.8492643 0.10643021 0.0443054661 1 2 0 +59 1 1 0.43732800775193598 0.6457596 0.2802832 0.0739572 1 2 0 +61 1 1 0.26826825521335035 0.7647026 0.165891945 0.06940546 1 2 0 +62 1 1 
0.16519114388964468 0.84773165 0.07620503 0.0760633051 1 2 0 +65 1 1 0.12274628026245138 0.884488046 0.0737581253 0.0417538173 1 0 2 +67 1 1 0.18131529311314101 0.8341723 0.09860581 0.0672218949 1 0 2 +75 1 1 0.13597202822637602 0.872867048 0.06550142 0.0616315342 1 0 2 +78 1 1 0.45281685214847861 0.6358346 0.332585216 0.0315802023 1 2 0 +80 1 1 0.23111914057709354 0.7936449 0.105801627 0.10055349 1 0 2 +81 1 1 0.21336965456040224 0.807857454 0.137252137 0.0548904277 1 0 2 +83 1 2 1.1418187241491273 0.6765539 0.3192379 0.00420819456 2 1 0 +84 1 2 0.79904921193122347 0.5272309 0.449756384 0.0230127368 2 1 0 +85 1 1 0.32376577391953759 0.723419666 0.223107859 0.05347247 1 2 0 +86 1 1 0.19858499419365563 0.8198901 0.142534047 0.037575867 1 2 0 +87 1 1 0.32077518585346926 0.725586355 0.24517718 0.0292364415 1 2 0 +89 1 1 0.36379979176298916 0.695030332 0.242874637 0.06209502 1 2 0 +94 1 1 0.37955450575815275 0.684166133 0.262924 0.0529098734 1 2 0 +101 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0 +103 2 2 0.33454245635946434 0.71566546 0.282688916 0.00164566189 2 1 0 +107 2 2 0.32176538137222749 0.724868238 0.27461502 0.000516714761 2 1 0 +110 2 2 0.4735643051120339 0.622778535 0.371121377 0.006100062 2 1 0 +114 2 2 0.3247388954422577 0.722716033 0.2752217 0.00206224318 2 1 0 +116 2 2 0.36482478597312679 0.6943183 0.3033064 0.00237530912 2 1 0 +118 2 2 0.30234231913723036 0.739085 0.260826528 8.847632E-05 2 1 0 +119 2 2 0.37792268388420569 0.6852835 0.3114479 0.0032686044 2 1 0 +124 2 2 0.33777190179266953 0.713358 0.284956157 0.00168584171 2 1 0 +126 2 2 0.51721400727433164 0.5961792 0.395325035 0.008495758 2 1 0 +127 2 2 0.48223827874761288 0.617399931 0.374503255 0.008096788 2 1 0 +130 2 2 0.33112825051245398 0.718113065 0.281247973 0.0006389589 2 1 0 +134 2 2 0.34240991810493487 0.7100571 0.288253874 0.00168902089 2 1 0 +135 2 2 0.3238549270121831 0.7233552 0.2760729 0.0005719304 2 1 0 +136 2 2 0.31869296062169472 0.727098763 0.271312952 0.00158829393 2 1 0 +139 2 2 0.39792518591800413 0.6717123 0.325529337 0.002758371 2 1 0 +140 2 2 0.32133155702629967 0.7251828 0.273566216 0.0012509838 2 1 0 +142 2 2 0.34078617573620174 0.711210966 0.28588894 0.002900116 2 1 0 +143 2 2 0.31559105025409723 0.72935766 0.269767135 0.0008751799 2 1 0 +146 2 2 0.3760214987664387 0.6865876 0.310142934 0.00326948427 2 1 0 +148 2 2 0.3305544580259554 0.718525231 0.2789816 0.00249314215 2 1 0 +149 2 2 0.37408822283240173 0.6879162 0.307514042 0.00456974143 2 1 0 diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-out.txt new file mode 100644 index 0000000000..1a5da994b3 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-out.txt @@ -0,0 +1,100 @@ +maml.exe CV tr=OVA{p=FastForest{ }} threads=- norm=No dout=%Output% data=%Data% seed=1 +Not adding a normalizer. +Training learner 0 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 71 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 2 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Training learner 1 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 71 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... 
+Warning: 3 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Training learner 2
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 71 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 1 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Not training a calibrator because it is not needed.
+Not adding a normalizer.
+Training learner 0
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 79 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 2 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Training learner 1
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 79 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 3 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Training learner 2
+Making per-feature arrays
+Changing data from row-wise to column-wise
+Processed 79 instances
+Binning and forming Feature objects
+Reserved memory for tree learner: %Number% bytes
+Starting to train ...
+Warning: 1 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset.
+Training calibrator.
+Not training a calibrator because it is not needed.
+ +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 21 | 0 | 0 | 1.0000 + 1 || 0 | 25 | 5 | 0.8333 + 2 || 0 | 1 | 27 | 0.9643 + ||======================== +Precision ||1.0000 |0.9615 |0.8438 | +Accuracy(micro-avg): 0.924051 +Accuracy(macro-avg): 0.932540 +Log-loss: 0.197783 +Log-loss reduction: 81.813342 + +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 29 | 0 | 0 | 1.0000 + 1 || 0 | 19 | 1 | 0.9500 + 2 || 0 | 2 | 20 | 0.9091 + ||======================== +Precision ||1.0000 |0.9048 |0.9524 | +Accuracy(micro-avg): 0.957746 +Accuracy(macro-avg): 0.953030 +Log-loss: 0.103360 +Log-loss reduction: 90.479422 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.940899 (0.0168) +Accuracy(macro-avg): 0.942785 (0.0102) +Log-loss: 0.150571 (0.0472) +Log-loss reduction: 86.146382 (4.3330) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-rp.txt new file mode 100644 index 0000000000..56ab2aa973 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris-rp.txt @@ -0,0 +1,4 @@ +OVA +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.940899 0.942785 0.150571 86.14639 FastForest{} OVA %Data% %Output% 99 0 0 maml.exe CV tr=OVA{p=FastForest{ }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:FastForest{} + diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris.txt new file mode 100644 index 0000000000..8e1c60c346 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-CV-iris.txt @@ -0,0 +1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +5 0 0 0.027350379370839409 0.973020256 0.0148040019 0.012175764 0 2 1 +6 0 0 0.023177365350252416 0.977089167 0.01466024 0.008250585 0 2 1 +8 0 0 0.03879484521448328 0.961948037 0.0225592032 0.0154927764 0 1 2 +9 0 0 0.026096982166513804 0.9742406 0.0147748869 0.01098449 0 2 1 +10 0 0 0.027350379370839409 0.973020256 0.0148040019 0.012175764 0 2 1 +11 0 0 0.023212076237087643 0.977055252 0.0146819549 0.008262806 0 2 1 +18 0 0 0.028815318823320137 0.9715959 0.0160096437 0.0123944618 0 2 1 +20 0 0 0.027350379370839409 0.973020256 0.0148040019 0.012175764 0 2 1 +21 0 0 0.023288456631253712 0.9769806 0.0146841714 0.008335201 0 2 1 +25 0 0 0.028704838697499659 0.971703231 0.0150171826 0.0132795852 0 2 1 +28 0 0 0.024710550769104819 0.975592256 0.0147331525 0.009674597 0 2 1 +31 0 0 0.027350379370839409 0.973020256 0.0148040019 0.012175764 0 2 1 +32 0 0 0.024756068218349039 0.97554785 0.0147599624 0.009692201 0 2 1 +35 0 0 0.023464117069036436 0.976809 0.0147707965 0.008420168 0 2 1 +37 0 0 0.026096982166513804 0.9742406 0.0147748869 0.01098449 0 2 1 +40 0 0 0.023182306545303632 0.977084339 0.01466333 0.008252324 0 2 1 +41 0 0 0.075381369255567446 0.927389741 0.0563764051 0.0162338577 0 1 2 +44 0 0 0.023288456631253712 0.9769806 0.0146841714 0.008335201 0 2 1 +45 0 0 0.028697907258661438 0.971709967 0.0150136231 0.0132764373 0 2 1 +46 0 0 0.023288456631253712 0.9769806 0.0146841714 0.008335201 0 
2 1 +48 0 0 0.024756068218349039 0.97554785 0.0147599624 0.009692201 0 2 1 +50 1 1 0.063133538332774491 0.9388181 0.03674654 0.02443534 1 2 0 +51 1 1 0.063273160443441889 0.938687 0.036751762 0.0245612152 1 2 0 +52 1 2 1.5448652812576653 0.7616039 0.21334061 0.02505552 2 1 0 +54 1 1 0.058659395969803671 0.9430279 0.03797895 0.0189931467 1 2 0 +56 1 1 0.062932806186675683 0.939006567 0.0365141444 0.0244792849 1 2 0 +60 1 1 0.10446634255003935 0.9008051 0.08302587 0.0161690265 1 0 2 +63 1 1 0.05614180204487236 0.945405066 0.0355887674 0.0190061387 1 2 0 +64 1 1 0.046482192945595177 0.954581559 0.0288921539 0.0165262986 1 0 2 +66 1 1 0.04066444950323992 0.960151255 0.0232533664 0.016595358 1 0 2 +68 1 1 0.056871265867723821 0.9447157 0.03689458 0.0183897614 1 2 0 +69 1 1 0.039875310060784264 0.960909247 0.0222220439 0.0168687385 1 0 2 +70 1 2 2.2263563135006348 0.867944062 0.107920945 0.0241349712 2 1 0 +71 1 1 0.059013852579431925 0.9426937 0.0366258025 0.0206804741 1 2 0 +72 1 2 1.0876589583679515 0.645501137 0.3370045 0.0174943563 2 1 0 +73 1 1 0.056708184586295521 0.944869757 0.0361831523 0.01894711 1 2 0 +74 1 1 0.057253869975114946 0.9443543 0.0349945128 0.02065122 1 2 0 +76 1 2 0.89537371392369014 0.5719312 0.408454925 0.0196138732 2 1 0 +77 1 2 1.9303219693474667 0.831519544 0.145101473 0.0233789887 2 1 0 +79 1 1 0.05999253183033975 0.941771567 0.0405655652 0.01766284 1 0 2 +82 1 1 0.050023969260699123 0.9512066 0.02679572 0.02199766 1 2 0 +88 1 1 0.045065919607380718 0.955934465 0.02764862 0.0164169129 1 0 2 +90 1 1 0.03943592815149323 0.961331546 0.02179231 0.0168761518 1 0 2 +91 1 1 0.058344871168396539 0.943324566 0.0352619849 0.0214134734 1 2 0 +92 1 1 0.050023969260699123 0.9512066 0.02679572 0.02199766 1 2 0 +93 1 1 0.10446634255003935 0.9008051 0.08302587 0.0161690265 1 0 2 +95 1 1 0.044750654086148499 0.9562359 0.0272751376 0.0164890066 1 0 2 +96 1 1 0.040619629909546248 0.9601943 0.02274274 0.0170629937 1 0 2 +97 1 1 0.057039737516047594 0.944556534 0.0348673128 0.0205761548 1 2 0 +98 1 1 0.10393389655676435 0.9012849 0.0826243 0.0160908233 1 0 2 +99 1 1 0.042296455668819607 0.95858556 0.0228082649 0.0186061822 1 0 2 +100 2 2 0.04325137116698035 0.9576706 0.0220750477 0.0202543456 2 0 1 +102 2 2 0.042999956444440428 0.957911432 0.02217581 0.01991274 2 1 0 +104 2 2 0.042999956444440428 0.957911432 0.02217581 0.01991274 2 1 0 +105 2 2 0.042999956444440428 0.957911432 0.02217581 0.01991274 2 1 0 +106 2 1 4.0557668267792941 0.9352778 0.04739997 0.017322192 1 0 2 +108 2 2 0.070150335263153035 0.932253659 0.04979825 0.0179480817 2 1 0 +109 2 2 0.043858138364186193 0.9570897 0.0223862287 0.0205240529 2 0 1 +111 2 2 0.070161652013398079 0.9322431 0.0498059951 0.0179508738 2 1 0 +112 2 2 0.042999956444440428 0.957911432 0.02217581 0.01991274 2 1 0 +113 2 2 0.11690579430486296 0.889669 0.09017117 0.0201598033 2 1 0 +115 2 2 0.043217015723945179 0.957703531 0.0220578834 0.0202385988 2 0 1 +117 2 2 0.043858138364186193 0.9570897 0.0223862287 0.0205240529 2 0 1 +120 2 2 0.043209920725147559 0.9577103 0.02205432 0.02023533 2 0 1 +121 2 2 0.098561098118739199 0.9061403 0.0728506148 0.02100908 2 1 0 +122 2 2 0.0471715874197976 0.9539237 0.0277284756 0.0183478333 2 1 0 +123 2 2 0.099357467789256262 0.905419 0.0767814144 0.0177995767 2 1 0 +125 2 2 0.043416567622914827 0.957512438 0.0220497642 0.0204377677 2 0 1 +128 2 2 0.047179147971301752 0.9539165 0.0277327932 0.01835069 2 1 0 +129 2 2 0.16789945151543201 0.845438838 0.13190572 0.0226554666 2 1 0 +131 2 2 0.043858138364186193 0.9570897 
0.0223862287 0.0205240529 2 0 1 +132 2 2 0.047179147971301752 0.9539165 0.0277327932 0.01835069 2 1 0 +133 2 2 0.20824894842479216 0.812004864 0.168109149 0.0198859684 2 1 0 +137 2 2 0.044668440799718316 0.9563145 0.02215964 0.0215258449 2 1 0 +138 2 2 0.15583318384491099 0.8557019 0.124324195 0.0199738927 2 1 0 +141 2 2 0.044522979110667826 0.9564536 0.0219786763 0.0215677191 2 1 0 +144 2 2 0.043244151446937412 0.957677543 0.0220714156 0.0202510133 2 0 1 +145 2 2 0.043081348153097068 0.957833469 0.0222168975 0.0199496336 2 1 0 +147 2 2 0.043081348153097068 0.957833469 0.0222168975 0.0199496336 2 1 0 +0 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +1 0 0 0.028724345128365684 0.9716843 0.01679263 0.0115231182 0 2 1 +2 0 0 0.028387329965105464 0.9720118 0.01713079 0.0108573884 0 2 1 +3 0 0 0.028429764932604316 0.971970558 0.01715604 0.0108733922 0 2 1 +4 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +7 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +12 0 0 0.028724345128365684 0.9716843 0.01679263 0.0115231182 0 2 1 +13 0 0 0.028724345128365684 0.9716843 0.01679263 0.0115231182 0 2 1 +14 0 0 0.043610929364968049 0.957326353 0.0244053546 0.0182683 0 1 2 +15 0 0 0.042852621944898722 0.9580526 0.0241116975 0.0178357288 0 1 2 +16 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +17 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +19 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +22 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +23 0 0 0.027786259158249255 0.9725962 0.0171605088 0.0102432566 0 2 1 +24 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +26 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +27 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +29 0 0 0.028387329965105464 0.9720118 0.01713079 0.0108573884 0 2 1 +30 0 0 0.028429764932604316 0.971970558 0.01715604 0.0108733922 0 2 1 +33 0 0 0.030899673422883547 0.969572842 0.0168408789 0.013586306 0 2 1 +34 0 0 0.028429764932604316 0.971970558 0.01715604 0.0108733922 0 2 1 +36 0 0 0.030899673422883547 0.969572842 0.0168408789 0.013586306 0 2 1 +38 0 0 0.028724345128365684 0.9716843 0.01679263 0.0115231182 0 2 1 +39 0 0 0.024228559539270302 0.9760626 0.01620378 0.00773361558 0 2 1 +42 0 0 0.028387329965105464 0.9720118 0.01713079 0.0108573884 0 2 1 +43 0 0 0.024252864268300027 0.9760389 0.0162198264 0.007741274 0 2 1 +47 0 0 0.028387329965105464 0.9720118 0.01713079 0.0108573884 0 2 1 +49 0 0 0.027755249905110149 0.9726264 0.0171416216 0.0102319829 0 2 1 +53 1 1 0.037556735021321959 0.9631398 0.0201520827 0.0167081431 1 2 0 +55 1 1 0.044305386985996155 0.956661761 0.0299111586 0.0134270638 1 2 0 +57 1 1 0.093536483034406898 0.9107048 0.07308376 0.0162114669 1 0 2 +58 1 1 0.061044837158525642 0.940781057 0.04666032 0.012558599 1 2 0 +59 1 1 0.057238090893367552 0.9443692 0.0346155576 0.02101522 1 0 2 +61 1 1 0.045384715012467255 0.955629766 0.0309758075 0.0133944545 1 2 0 +62 1 1 0.033237106686933228 0.9673092 0.01762867 0.0150621245 1 2 0 +65 1 1 0.05778179219177005 0.9438559 0.04312986 0.01301424 1 2 0 +67 1 1 0.033103587181904308 0.96743834 0.0176091753 0.0149524612 1 2 0 +75 1 1 0.058650104789816478 0.9430367 0.0440750532 0.0128882658 1 2 0 +78 1 1 0.056192871154629086 0.9453568 0.04159324 0.0130499722 1 2 0 +80 1 1 0.036130172143532695 0.964514732 0.0192673262 0.0162179433 1 0 2 +81 1 1 0.036130172143532695 0.964514732 0.0192673262 0.0162179433 1 0 2 
+83 1 2 1.2480745375737912 0.69912 0.287056983 0.013823038 2 1 0 +84 1 1 0.083086720865945554 0.920271337 0.04498237 0.0347462669 1 2 0 +85 1 1 0.17039047641137059 0.84333545 0.141331524 0.0153330155 1 2 0 +86 1 1 0.080997463473013107 0.922196031 0.06443846 0.0133655285 1 2 0 +87 1 1 0.051908113898915885 0.9494161 0.03799608 0.01258784 1 2 0 +89 1 1 0.037556735021321959 0.9631398 0.0201520827 0.0167081431 1 2 0 +94 1 1 0.035086091646095253 0.9655223 0.0208149366 0.0136627667 1 2 0 +101 2 2 0.049619880098472589 0.9515911 0.0341902152 0.014218702 2 1 0 +103 2 2 0.03226036779292963 0.968254447 0.018891599 0.0128539279 2 1 0 +107 2 2 0.027233200921188156 0.9731343 0.0140761612 0.0127895456 2 1 0 +110 2 2 0.028649511205966538 0.971757 0.015087897 0.0131550925 2 1 0 +114 2 2 0.047580126549381846 0.953534067 0.03241034 0.0140556078 2 1 0 +116 2 2 0.026689263578566873 0.973663747 0.0134749562 0.0128613133 2 1 0 +118 2 2 0.024777391870297136 0.975527048 0.0127499122 0.0117230108 2 0 1 +119 2 1 1.7884441874760408 0.819463134 0.167220131 0.0133167468 1 2 0 +124 2 2 0.022308948384583055 0.977938056 0.0128816022 0.009180347 2 0 1 +126 2 2 0.58862166918290049 0.555091858 0.433407664 0.0115004806 2 1 0 +127 2 2 0.36319554960125283 0.6954504 0.293058 0.011491593 2 1 0 +130 2 2 0.026158164658917963 0.974181 0.0130336434 0.0127853788 2 1 0 +134 2 1 0.71868836890731069 0.493089527 0.4873911 0.019519357 1 2 0 +135 2 2 0.02241305521632829 0.977836251 0.0127510056 0.0094127655 2 0 1 +136 2 2 0.026674020694690079 0.9736786 0.0157151483 0.0106062358 2 0 1 +139 2 2 0.024212560266198765 0.9760782 0.0129676694 0.010954137 2 0 1 +140 2 2 0.023236661302344995 0.977031231 0.0128853433 0.0100834481 2 0 1 +142 2 2 0.049619880098472589 0.9515911 0.0341902152 0.014218702 2 1 0 +143 2 2 0.022719770126189896 0.9775364 0.012872614 0.009590993 2 0 1 +146 2 2 0.17992962432883589 0.835329 0.1525824 0.0120885922 2 1 0 +148 2 2 0.033195268246711006 0.967349648 0.0165074 0.0161429718 2 0 1 +149 2 2 0.04654326171654332 0.954523265 0.03170268 0.0137740513 2 1 0 diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-out.txt new file mode 100644 index 0000000000..685dbd1c35 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-out.txt @@ -0,0 +1,57 @@ +maml.exe TrainTest test=%Data% tr=OVA{p=FastForest{ }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 +Not adding a normalizer. +Training learner 0 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 150 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 2 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Training learner 1 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 150 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... +Warning: 3 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Training learner 2 +Making per-feature arrays +Changing data from row-wise to column-wise +Processed 150 instances +Binning and forming Feature objects +Reserved memory for tree learner: %Number% bytes +Starting to train ... 
+Warning: 1 of the boosting iterations failed to grow a tree. This is commonly because the minimum documents in leaf hyperparameter was set too high for this dataset. +Training calibrator. +Not training a calibrator because it is not needed. + +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 50 | 0 | 0 | 1.0000 + 1 || 0 | 48 | 2 | 0.9600 + 2 || 0 | 2 | 48 | 0.9600 + ||======================== +Precision ||1.0000 |0.9600 |0.9600 | +Accuracy(micro-avg): 0.973333 +Accuracy(macro-avg): 0.973333 +Log-loss: 0.088201 +Log-loss reduction: 91.971614 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.973333 (0.0000) +Accuracy(macro-avg): 0.973333 (0.0000) +Log-loss: 0.088201 (0.0000) +Log-loss reduction: 91.971614 (0.0000) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-rp.txt new file mode 100644 index 0000000000..ccedd71525 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris-rp.txt @@ -0,0 +1,4 @@ +OVA +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.973333 0.973333 0.088201 91.97161 FastForest{} OVA %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=OVA{p=FastForest{ }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:FastForest{} + diff --git a/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris.txt b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris.txt new file mode 100644 index 0000000000..f6208a78c9 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-FastForest-TrainTest-iris.txt @@ -0,0 +1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +0 0 0 0.017464080270232062 0.982687533 0.0105327293 0.006779738 0 2 1 +1 0 0 0.019626027175872099 0.9805653 0.0113047613 0.008129944 0 2 1 +2 0 0 0.018428955784221195 0.9817398 0.0112173269 0.00704283174 0 2 1 +3 0 0 0.018512197155923133 0.9816581 0.0112163927 0.00712551735 0 2 1 +4 0 0 0.017492831024486424 0.9826593 0.0106203947 0.006720314 0 2 1 +5 0 0 0.01899702913087822 0.9811823 0.0107108289 0.008106917 0 2 1 +6 0 0 0.017808415781631996 0.9823492 0.0106611857 0.00698957127 0 2 1 +7 0 0 0.017417437872362801 0.982733369 0.0104801161 0.006786526 0 2 1 +8 0 0 0.040166580610096203 0.9606294 0.0283070058 0.0110635748 0 1 2 +9 0 0 0.018526162470332681 0.9816444 0.01122477 0.007130839 0 2 1 +10 0 0 0.017611300130802875 0.9825429 0.0106790941 0.00677801576 0 2 1 +11 0 0 0.01778869640055902 0.9823686 0.0106666889 0.00696469564 0 2 1 +12 0 0 0.019606150350793675 0.9805848 0.011293401 0.008121774 0 2 1 +13 0 0 0.019606150350793675 0.9805848 0.011293401 0.008121774 0 2 1 +14 0 0 0.026412295611523272 0.973933458 0.01333986 0.01272667 0 1 2 +15 0 0 0.025199192606323598 0.975115657 0.0133637665 0.0115205906 0 1 2 +16 0 0 0.018670259471363557 0.98150295 0.0106890509 0.0078079775 0 2 1 +17 0 0 0.017685676541357578 0.9824698 0.0105369147 0.006993265 0 2 1 +18 0 0 0.025319862054157889 0.974998 0.0134827839 0.0115192011 0 1 2 +19 0 0 0.017769887507106903 0.982387066 0.0106839687 0.006928955 0 2 1 +20 0 0 0.018154812191360413 0.982009 0.0104912994 0.00749973 0 2 1 +21 
0 0 0.018599999721891122 0.9815719 0.0106898025 0.007738281 0 2 1 +22 0 0 0.017675059667948433 0.9824802 0.01080584 0.006713943 0 2 1 +23 0 0 0.019144535618423879 0.981037557 0.010790579 0.0081718415 0 2 1 +24 0 0 0.01786921459800335 0.9822895 0.0106762443 0.007034263 0 2 1 +25 0 0 0.019668760648275045 0.9805234 0.0111163342 0.008360277 0 2 1 +26 0 0 0.01871422746097379 0.9814598 0.0105026765 0.008037531 0 2 1 +27 0 0 0.017464080270232062 0.982687533 0.0105327293 0.006779738 0 2 1 +28 0 0 0.017410644882776549 0.982740045 0.0104760742 0.006783909 0 2 1 +29 0 0 0.018627325744847049 0.9815451 0.01122066 0.007234277 0 2 1 +30 0 0 0.018712891388611139 0.9814611 0.0112197 0.007319177 0 2 1 +31 0 0 0.018981599313886478 0.9811974 0.0104813036 0.008321293 0 2 1 +32 0 0 0.017550274352973697 0.982602835 0.010679746 0.00671739224 0 2 1 +33 0 0 0.021116150761496876 0.979105234 0.0108751021 0.0100196926 0 2 1 +34 0 0 0.018526162470332681 0.9816444 0.01122477 0.007130839 0 2 1 +35 0 0 0.018250049751936549 0.9819155 0.0110310521 0.007053445 0 2 1 +36 0 0 0.021060206686807477 0.97916 0.0107265348 0.0101134721 0 2 1 +37 0 0 0.018526162470332681 0.9816444 0.01122477 0.007130839 0 2 1 +38 0 0 0.019606150350793675 0.9805848 0.011293401 0.008121774 0 2 1 +39 0 0 0.017410644882776549 0.982740045 0.0104760742 0.006783909 0 2 1 +40 0 0 0.017692532068078491 0.982463062 0.0105409911 0.00699597038 0 2 1 +41 0 0 0.043281619871080874 0.957641661 0.0299625713 0.0123957563 0 1 2 +42 0 0 0.018428955784221195 0.9817398 0.0112173269 0.00704283174 0 2 1 +43 0 0 0.01876481733250969 0.981410146 0.0105582057 0.00803162251 0 2 1 +44 0 0 0.018924134452175709 0.9812538 0.0107116094 0.008034597 0 2 1 +45 0 0 0.019874004021370295 0.9803222 0.0112994574 0.008378385 0 2 1 +46 0 0 0.017750532918328971 0.9824061 0.0106895706 0.00690434966 0 2 1 +47 0 0 0.018428955784221195 0.9817398 0.0112173269 0.00704283174 0 2 1 +48 0 0 0.017550274352973697 0.982602835 0.010679746 0.00671739224 0 2 1 +49 0 0 0.017778139107128599 0.98237896 0.01077697 0.006844043 0 2 1 +50 1 1 0.041011093232678458 0.9598185 0.0318259969 0.008355537 1 2 0 +51 1 1 0.035006088801506997 0.965599537 0.0262838528 0.008116591 1 2 0 +52 1 1 0.40445010925845948 0.6673437 0.325442761 0.00721360464 1 2 0 +53 1 1 0.023231963861032118 0.9770358 0.0135212354 0.00944295153 1 2 0 +54 1 1 0.037938333582228756 0.9627723 0.0292570144 0.007970667 1 2 0 +55 1 1 0.026037026955485622 0.974299 0.01712974 0.008571236 1 2 0 +56 1 1 0.044224954690087047 0.9567387 0.034255214 0.009006079 1 2 0 +57 1 1 0.03303643367073926 0.9675033 0.0199500173 0.0125466976 1 0 2 +58 1 1 0.026098634044639583 0.974239 0.0178385321 0.007922452 1 2 0 +59 1 1 0.034668122729441544 0.965925932 0.0176332947 0.0164408013 1 2 0 +60 1 1 0.028925872510284695 0.9714885 0.0161692444 0.01234228 1 0 2 +61 1 1 0.032514946101960943 0.968008 0.0239325147 0.008059508 1 2 0 +62 1 1 0.024235704335496926 0.9760556 0.0151068689 0.008837529 1 2 0 +63 1 1 0.035760629302617933 0.9648712 0.02719415 0.007934644 1 2 0 +64 1 1 0.022137329869205608 0.9781059 0.0122523606 0.009641748 1 2 0 +65 1 1 0.02697255414826619 0.973387957 0.0186308678 0.007981158 1 2 0 +66 1 1 0.036729417078252675 0.9639369 0.02751693 0.008546158 1 2 0 +67 1 1 0.022503395570932342 0.9777479 0.013421339 0.008830723 1 2 0 +68 1 1 0.038855074517123001 0.9618901 0.0301695466 0.007940366 1 2 0 +69 1 1 0.023358863535601033 0.976911843 0.0131962541 0.009891883 1 2 0 +70 1 2 2.0041753304058187 0.856793344 0.134771392 0.008435239 2 1 0 +71 1 1 0.023006329648145647 0.9772563 
0.014135709 0.00860798452 1 2 0 +72 1 1 0.43301575781706009 0.6485503 0.343778223 0.007671475 1 2 0 +73 1 1 0.028976552148168057 0.971439242 0.02040257 0.008158188 1 2 0 +74 1 1 0.022902770892273123 0.9773575 0.0147489849 0.007893479 1 2 0 +75 1 1 0.02691058704543008 0.9734483 0.01860106 0.007950675 1 2 0 +76 1 1 0.18688860192812912 0.82953614 0.162185773 0.008278089 1 2 0 +77 1 2 0.92579454557270313 0.5967647 0.396216482 0.00701882 2 1 0 +78 1 1 0.037027566079820731 0.9636496 0.028475076 0.007875373 1 2 0 +79 1 1 0.022937167310755541 0.9773239 0.0127002485 0.009975866 1 2 0 +80 1 1 0.022877401242447332 0.9773823 0.0125660188 0.0100516649 1 2 0 +81 1 1 0.022877401242447332 0.9773823 0.0125660188 0.0100516649 1 2 0 +82 1 1 0.023404014419184039 0.976867735 0.0133821117 0.009750146 1 2 0 +83 1 1 0.70075639726830674 0.49620983 0.496017843 0.007772315 1 2 0 +84 1 1 0.044236542522837867 0.9567276 0.0263874587 0.0168849323 1 2 0 +85 1 1 0.043024535046640382 0.9578879 0.03138008 0.01073204 1 2 0 +86 1 1 0.042293035786450502 0.958588839 0.0332719 0.008139238 1 2 0 +87 1 1 0.025156527765232962 0.975157261 0.0169389956 0.0079037305 1 2 0 +88 1 1 0.022358318540260842 0.9778898 0.0133089311 0.008801297 1 2 0 +89 1 1 0.023121976834571175 0.9771433 0.0134127168 0.009443991 1 2 0 +90 1 1 0.022650566634269925 0.977604032 0.0135507975 0.008845187 1 2 0 +91 1 1 0.030649193522063412 0.969815731 0.0222469531 0.00793733 1 2 0 +92 1 1 0.023666418009492051 0.976611435 0.0139777046 0.009410832 1 2 0 +93 1 1 0.032170618995321684 0.968341351 0.0193278752 0.0123307668 1 0 2 +94 1 1 0.023670568200763999 0.9766074 0.0147704128 0.008622234 1 2 0 +95 1 1 0.022473768887261287 0.9777769 0.0132312737 0.008991833 1 2 0 +96 1 1 0.022786722203040281 0.977470934 0.013828597 0.008700479 1 2 0 +97 1 1 0.022998705699126043 0.977263749 0.0148692625 0.00786699 1 2 0 +98 1 1 0.1831783948365549 0.8326196 0.154965982 0.0124144387 1 0 2 +99 1 1 0.023075496769295524 0.9771887 0.0140385861 0.008772707 1 2 0 +100 2 2 0.021566800361238793 0.9786641 0.0126012722 0.008734606 2 1 0 +101 2 2 0.042422564225694827 0.9584647 0.03307351 0.008461786 2 1 0 +102 2 2 0.016595760358213897 0.9835412 0.008572242 0.007886566 2 1 0 +103 2 2 0.022044646180015223 0.978196561 0.0139042465 0.007899184 2 1 0 +104 2 2 0.016709516937862282 0.9834293 0.008683964 0.007886705 2 1 0 +105 2 2 0.016595760358213897 0.9835412 0.008572242 0.007886566 2 1 0 +106 2 1 2.1090311772834194 0.862226963 0.121355481 0.0164175835 1 2 0 +107 2 2 0.020456222190452011 0.9797516 0.0123546757 0.00789376 2 1 0 +108 2 2 0.021078468826306502 0.9791421 0.0129446462 0.00791325 2 1 0 +109 2 2 0.019468846928401097 0.980719447 0.0106313359 0.008649208 2 0 1 +110 2 2 0.022319614569829012 0.9779276 0.0139611857 0.008111184 2 1 0 +111 2 2 0.019269823866623015 0.980914652 0.0112084318 0.007876894 2 1 0 +112 2 2 0.016977687007301158 0.9831656 0.00894971 0.007884688 2 1 0 +113 2 2 0.058042636346825223 0.9436097 0.047512807 0.008877498 2 1 0 +114 2 2 0.041556106366798046 0.9592955 0.0322531648 0.008451329 2 1 0 +115 2 2 0.019791317974830646 0.980403244 0.0115674054 0.008029342 2 1 0 +116 2 2 0.020432982922414907 0.979774356 0.0123304194 0.007895238 2 1 0 +117 2 2 0.019466962860278662 0.9807213 0.0106303059 0.008648369 2 0 1 +118 2 2 0.0188647899585574 0.981312037 0.0108158737 0.007872073 2 1 0 +119 2 1 0.94392904693686375 0.6026611 0.389096051 0.008242885 1 2 0 +120 2 2 0.017366248980001619 0.9827837 0.009171124 0.008045211 2 1 0 +121 2 2 0.088866562817115968 0.914967656 0.07653753 0.008494817 2 1 0 
+122 2 2 0.018631090722093534 0.9815414 0.0105879167 0.007870668 2 1 0 +123 2 2 0.06145008441574909 0.9403999 0.05166457 0.007935554 2 1 0 +124 2 2 0.018444680647443288 0.9817244 0.009521433 0.008754187 2 1 0 +125 2 2 0.020508786296772101 0.9797001 0.01224504 0.008054874 2 1 0 +126 2 2 0.11925013225824062 0.887585759 0.104224905 0.008189328 2 1 0 +127 2 2 0.10605093599317304 0.899378836 0.0929255262 0.00769562228 2 1 0 +128 2 2 0.018565509274527169 0.981605768 0.010519552 0.007874679 2 1 0 +129 2 2 0.39821823307895049 0.671515465 0.320187718 0.008296814 2 1 0 +130 2 2 0.018932334838268155 0.981245756 0.0108812321 0.007873024 2 1 0 +131 2 2 0.019466962860278662 0.9807213 0.0106303059 0.008648369 2 0 1 +132 2 2 0.018565509274527169 0.981605768 0.010519552 0.007874679 2 1 0 +133 2 2 0.62873836754113976 0.53326416 0.458078653 0.008657171 2 1 0 +134 2 2 0.60851248990646989 0.5441597 0.448350728 0.00748954341 2 1 0 +135 2 2 0.016595760358213897 0.9835412 0.008572242 0.007886566 2 1 0 +136 2 2 0.022577039360831119 0.9776759 0.0117904674 0.0105336225 2 1 0 +137 2 2 0.022975285291276865 0.977286637 0.0148325013 0.007880885 2 1 0 +138 2 2 0.16372490438447862 0.848975539 0.142925009 0.008099449 2 1 0 +139 2 2 0.017566167387754482 0.9825872 0.00952361 0.0078891525 2 1 0 +140 2 2 0.017952470071537514 0.9822077 0.009910649 0.007881647 2 1 0 +141 2 2 0.02156308521875348 0.978667736 0.0133916177 0.007940661 2 1 0 +142 2 2 0.042422564225694827 0.9584647 0.03307351 0.008461786 2 1 0 +143 2 2 0.01734514342237381 0.9828044 0.009152353 0.008043245 2 1 0 +144 2 2 0.018444680647443288 0.9817244 0.009521433 0.008754187 2 1 0 +145 2 2 0.018486635063884913 0.9816832 0.0104312422 0.007885565 2 1 0 +146 2 2 0.035952581762754975 0.964686036 0.0272667371 0.008047254 2 1 0 +147 2 2 0.018195479765991927 0.981969059 0.0101450672 0.007885858 2 1 0 +148 2 2 0.02575522444396355 0.9745736 0.0148916543 0.0105347475 2 1 0 +149 2 2 0.043968997509596895 0.9569836 0.0350129567 0.008003409 2 1 0 diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt new file mode 100644 index 0000000000..4d9f2f452b --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-out.txt @@ -0,0 +1,36 @@ +maml.exe TrainTest test=%Data% tr=OVA{p=AvgPer{ lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 +Not adding a normalizer. +Training learner 0 +Training calibrator. +Training learner 1 +Training calibrator. +Training learner 2 +Training calibrator. +Not training a calibrator because it is not needed. 
+ +Confusion table + ||======================== +PREDICTED || 0 | 1 | 2 | Recall +TRUTH ||======================== + 0 || 50 | 0 | 0 | 1.0000 + 1 || 0 | 48 | 2 | 0.9600 + 2 || 0 | 0 | 50 | 1.0000 + ||======================== +Precision ||1.0000 |1.0000 |0.9615 | +Accuracy(micro-avg): 0.986667 +Accuracy(macro-avg): 0.986667 +Log-loss: 0.246444 +Log-loss reduction: 77.567746 + +OVERALL RESULTS +--------------------------------------- +Accuracy(micro-avg): 0.986667 (0.0000) +Accuracy(macro-avg): 0.986667 (0.0000) +Log-loss: 0.246444 (0.0000) +Log-loss reduction: 77.567746 (0.0000) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt new file mode 100644 index 0000000000..971b18dd55 --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris-rp.txt @@ -0,0 +1,4 @@ +OVA +Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.986667 0.986667 0.246444 77.56775 AvgPer{lr=0.8} OVA %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=OVA{p=AvgPer{ lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8} + diff --git a/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt new file mode 100644 index 0000000000..cbb0c14c4f --- /dev/null +++ b/test/BaselineOutput/Common/OVA/OVA-TrainTest-iris.txt @@ -0,0 +1,151 @@ +Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class +0 0 0 0.064187757670407464 0.9378289 0.0621710941 8.6083265E-09 0 1 2 +1 0 0 0.1893002350721239 0.827538 0.172461912 5.35715046E-08 0 1 2 +2 0 0 0.13195967886821539 0.876376331 0.123623639 3.315678E-08 0 1 2 +3 0 0 0.18799826814680765 0.828616142 0.1713837 1.33305463E-07 0 1 2 +4 0 0 0.053931560367015983 0.947496951 0.0525030419 8.35157454E-09 0 1 2 +5 0 0 0.024550920036862604 0.975748 0.0242519863 8.78616E-09 0 1 2 +6 0 0 0.092104047221280363 0.912010252 0.08798967 5.086476E-08 0 1 2 +7 0 0 0.0890750453193484 0.9147769 0.08522307 2.25849668E-08 0 1 2 +8 0 0 0.2649651905225579 0.767232656 0.23276712 2.323326E-07 0 1 2 +9 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 +10 0 0 0.038712934227392123 0.962026834 0.037973173 3.67671626E-09 0 1 2 +11 0 0 0.10398316683257577 0.901240468 0.09875945 5.7562815E-08 0 1 2 +12 0 0 0.20701490311777249 0.813007534 0.186992422 5.15469765E-08 0 1 2 +13 0 0 0.20677930042755727 0.8131991 0.186800852 4.620836E-08 0 1 2 +14 0 0 0.012900958726204939 0.9871819 0.0128180888 1.53412311E-10 0 1 2 +15 0 0 0.0056796126449842428 0.9943365 0.00566351926 4.0924758E-10 0 1 2 +16 0 0 0.018169258095637245 0.9819948 0.018005196 1.47014889E-09 0 1 2 +17 0 0 0.059814766253305368 0.941939 0.0580609739 1.13122178E-08 0 1 2 +18 0 0 0.028932621466359262 0.9714819 0.02851807 4.23276969E-09 0 1 2 +19 0 0 0.032923391844126633 0.9676127 0.0323873 7.642096E-09 0 1 2 +20 0 0 0.08651372619399908 0.91712296 0.08287702 2.06279154E-08 0 1 2 +21 0 0 0.038368820295834986 0.962357938 0.03764207 1.32810882E-08 0 1 2 +22 0 0 0.04847884988937378 0.9526775 0.0473225228 3.781359E-09 0 1 2 +23 0 0 0.099972123278544239 0.904862642 0.09513722 1.30113463E-07 0 1 2 +24 0 0 0.12884486436940729 0.879110336 0.120889448 2.201515E-07 0 1 2 +25 0 
0 0.20701321690255622 0.8130089 0.186990991 1.01994196E-07 0 1 2 +26 0 0 0.083707914672920755 0.919699848 0.08030011 6.121948E-08 0 1 2 +27 0 0 0.065971839514429781 0.9361572 0.063842766 1.05041069E-08 0 1 2 +28 0 0 0.076179360015026509 0.92665 0.0733500347 8.859791E-09 0 1 2 +29 0 0 0.16075048860491603 0.8515045 0.148495391 1.25408391E-07 0 1 2 +30 0 0 0.18626475341925278 0.8300538 0.169946045 1.27813465E-07 0 1 2 +31 0 0 0.065274392613973067 0.9368104 0.0631895959 1.46311239E-08 0 1 2 +32 0 0 0.018341836022086215 0.981825352 0.018174639 1.48765988E-09 0 1 2 +33 0 0 0.010831873835231624 0.9892266 0.0107734147 4.45536885E-10 0 1 2 +34 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 +35 0 0 0.1094558082339183 0.8963218 0.103678234 1.02393658E-08 0 1 2 +36 0 0 0.049986936620370084 0.951241851 0.04875815 2.05979567E-09 0 1 2 +37 0 0 0.17798339361396398 0.8369563 0.163043633 4.85697953E-08 0 1 2 +38 0 0 0.2126495148617171 0.808439434 0.191560462 1.15782285E-07 0 1 2 +39 0 0 0.085343924605329857 0.91819644 0.08180357 1.76697252E-08 0 1 2 +40 0 0 0.058200059976790904 0.9434612 0.05653884 9.271867E-09 0 1 2 +41 0 0 0.49728774260829139 0.60817796 0.391821355 7.12752467E-07 0 1 2 +42 0 0 0.14839409412278926 0.8620913 0.137908638 6.888182E-08 0 1 2 +43 0 0 0.058733854938205904 0.9429577 0.0570422448 8.043525E-08 0 1 2 +44 0 0 0.041432529938052375 0.959414065 0.0405859053 6.019737E-08 0 1 2 +45 0 0 0.18582725015020429 0.830417037 0.169582859 9.04475E-08 0 1 2 +46 0 0 0.038175474655158276 0.962544 0.03745597 9.106087E-09 0 1 2 +47 0 0 0.14649366336893571 0.8637312 0.13626872 6.586965E-08 0 1 2 +48 0 0 0.040528072578562842 0.9602822 0.03971778 4.706963E-09 0 1 2 +49 0 0 0.10224582319060745 0.9028076 0.09719238 1.898811E-08 0 1 2 +50 1 1 0.13228530826110765 0.876091 0.07536332 0.04854567 1 0 2 +51 1 1 0.19869535635760471 0.8197996 0.101386108 0.0788142756 1 2 0 +52 1 1 0.22406365925144375 0.799264252 0.175007358 0.0257283952 1 2 0 +53 1 1 0.22846786324242829 0.79575187 0.190147012 0.0141011067 1 2 0 +54 1 1 0.2217374832444966 0.801125646 0.1814971 0.0173772536 1 2 0 +55 1 1 0.36194510206226777 0.6963206 0.290218145 0.0134612536 1 2 0 +56 1 1 0.38227740308279812 0.682305753 0.26903218 0.04866208 1 2 0 +57 1 1 0.11711832955706668 0.889479935 0.09261171 0.01790834 1 0 2 +58 1 1 0.11588080546894164 0.890581369 0.0796031356 0.0298154745 1 2 0 +59 1 1 0.22549808191140069 0.7981186 0.1629105 0.0389709137 1 2 0 +60 1 1 0.11452610106565105 0.891788661 0.08094173 0.027269613 1 2 0 +61 1 1 0.19606227523723149 0.821961045 0.1124982 0.0655407459 1 2 0 +62 1 1 0.065211469379248863 0.9368693 0.0428994 0.0202313047 1 2 0 +63 1 1 0.38674137023255867 0.679266751 0.3082701 0.0124631459 1 2 0 +64 1 1 0.23030100927747191 0.7942945 0.193664417 0.0120411161 1 0 2 +65 1 1 0.13921020030567513 0.8700451 0.0984156 0.0315392464 1 0 2 +66 1 1 0.52761682473815386 0.5900094 0.392468572 0.0175220035 1 2 0 +67 1 1 0.084603908782305962 0.9188762 0.0453156643 0.03580814 1 0 2 +68 1 1 0.4618931830241112 0.630089641 0.3666358 0.00327458978 1 2 0 +69 1 1 0.090485397720604446 0.9134877 0.04816054 0.03835176 1 2 0 +70 1 2 1.0140603005132554 0.6271839 0.362743139 0.0100729465 2 1 0 +71 1 1 0.11008255968443192 0.8957602 0.07861323 0.0256266128 1 0 2 +72 1 1 0.64771052952961239 0.523242354 0.474618584 0.00213904912 1 2 0 +73 1 1 0.27755598196422471 0.75763315 0.230862036 0.011504828 1 2 0 +74 1 1 0.10244027519493581 0.902632058 0.0589159 0.0384520665 1 0 2 +75 1 1 0.11841763901117131 0.888325 0.0673675239 0.0443075225 1 0 2 
+76 1 1 0.19025677181646344 0.8267468 0.160040483 0.0132126883 1 2 0 +77 1 1 0.61336152114875442 0.54152745 0.4505507 0.00792186148 1 2 0 +78 1 1 0.33449082060539098 0.7157024 0.264204919 0.0200926811 1 2 0 +79 1 1 0.1490904339680772 0.8614912 0.134173632 0.00433517434 1 0 2 +80 1 1 0.089822225657751095 0.9140937 0.04908715 0.0368192 1 2 0 +81 1 1 0.077764627112278675 0.925182164 0.0500315838 0.0247862767 1 0 2 +82 1 1 0.10062242034830922 0.9042744 0.06707825 0.028647339 1 0 2 +83 1 2 0.87242455033259836 0.580679059 0.417937 0.001383926 2 1 0 +84 1 1 0.64643118238444741 0.5239122 0.462312371 0.0137754688 1 2 0 +85 1 1 0.38078103791751566 0.6833275 0.219007626 0.09766489 1 2 0 +86 1 1 0.18654240234510375 0.8298234 0.129826292 0.0403503366 1 2 0 +87 1 1 0.19556197322229293 0.8223724 0.169585884 0.008041753 1 2 0 +88 1 1 0.16967653754303061 0.843937755 0.07920382 0.07685845 1 2 0 +89 1 1 0.17942883979161328 0.8357474 0.140764222 0.0234883726 1 2 0 +90 1 1 0.37073556760095389 0.690226436 0.2999358 0.009837814 1 2 0 +91 1 1 0.28153328535230204 0.7546258 0.221098259 0.02427597 1 2 0 +92 1 1 0.095651081105254609 0.908781052 0.0509764329 0.0402425081 1 2 0 +93 1 1 0.10201305873404877 0.903017759 0.07907509 0.0179071445 1 0 2 +94 1 1 0.21320461976679056 0.8079908 0.167838141 0.02417106 1 2 0 +95 1 1 0.14772366482527016 0.862669468 0.07083947 0.06649103 1 2 0 +96 1 1 0.1676055742511661 0.8456873 0.107580893 0.04673176 1 2 0 +97 1 1 0.11592859305433842 0.8905388 0.058125712 0.051335495 1 2 0 +98 1 1 0.24343470732109662 0.783930659 0.212896168 0.00317314919 1 0 2 +99 1 1 0.14288965000739814 0.8668497 0.0867102742 0.0464400165 1 2 0 +100 2 2 0.22003618725213686 0.802489758 0.197204947 0.000305285153 2 1 0 +101 2 2 0.4769116280424554 0.6206974 0.378338724 0.0009639265 2 1 0 +102 2 2 0.32248110542186914 0.7243496 0.2752483 0.000402083038 2 1 0 +103 2 2 0.4318088621640172 0.6493335 0.3500793 0.00058722886 2 1 0 +104 2 2 0.33701376028625557 0.713899 0.2857394 0.000361559069 2 1 0 +105 2 2 0.36111778351144497 0.6968969 0.303009778 9.33285046E-05 2 1 0 +106 2 2 0.57324912019977703 0.56369096 0.434159666 0.00214935932 2 1 0 +107 2 2 0.43241433229369408 0.648940444 0.3508895 0.000170046318 2 1 0 +108 2 2 0.5524780213978463 0.5755219 0.424283057 0.000195083121 2 1 0 +109 2 2 0.10018846863192231 0.9046669 0.0947069 0.000626212 2 1 0 +110 2 2 0.34368591687548161 0.7091516 0.2858574 0.00499094324 2 1 0 +111 2 2 0.46723123458330745 0.626735151 0.3724764 0.000788469857 2 1 0 +112 2 2 0.31582627414124387 0.7291861 0.2698556 0.0009583179 2 1 0 +113 2 2 0.52239827082819301 0.593096435 0.406178564 0.00072502665 2 1 0 +114 2 2 0.36058132074789678 0.6972709 0.301918417 0.000810695637 2 1 0 +115 2 2 0.21578587862382262 0.805907845 0.192418143 0.00167402264 2 1 0 +116 2 2 0.38868733060861138 0.6779462 0.3210066 0.00104722043 2 1 0 +117 2 2 0.097428111747803359 0.907167554 0.0924843 0.000348151079 2 1 0 +118 2 2 0.51096864540412867 0.5999142 0.400067 1.88108879E-05 2 1 0 +119 2 2 0.64730667194394087 0.5234537 0.4758455 0.000700797769 2 1 0 +120 2 2 0.21506259321317989 0.806490958 0.192730322 0.0007787307 2 1 0 +121 2 2 0.42372860353635938 0.6546015 0.3437634 0.00163504237 2 1 0 +122 2 2 0.45634525198899917 0.633595049 0.366349727 5.52281235E-05 2 1 0 +123 2 2 0.56778894609208741 0.5667772 0.4307018 0.00252098124 2 1 0 +124 2 2 0.22282641548991947 0.800253749 0.198714435 0.00103183649 2 1 0 +125 2 2 0.31027244305630025 0.733247161 0.266095132 0.0006576972 2 1 0 +126 2 2 0.59157505509617891 0.5534549 0.442328155 0.004216949 2 
+127 2 2 0.48357066476934346 0.616577864 0.3786479 0.00477422541 2 1 0
+128 2 2 0.41595305224046536 0.659711242 0.339890242 0.0003985048 2 1 0
+129 2 2 0.45550524433223816 0.6341275 0.364983171 0.0008893516 2 1 0
+130 2 2 0.43130825350797314 0.6496586 0.3501238 0.000217600667 2 1 0
+131 2 2 0.11700817008638008 0.8895779 0.109250523 0.00117157528 2 1 0
+132 2 2 0.40127211138702135 0.669467866 0.330161959 0.000370198279 2 1 0
+133 2 2 0.63157763358593388 0.5317522 0.465779215 0.002468573 2 1 0
+134 2 2 0.58314815792662678 0.5581385 0.441456854 0.000404651684 2 1 0
+135 2 2 0.27204979854142897 0.7618163 0.2379036 0.000280094158 2 1 0
+136 2 2 0.1629980271721547 0.849592865 0.149376482 0.00103064778 2 1 0
+137 2 2 0.35495650373339338 0.701203942 0.297558725 0.00123733515 2 1 0
+138 2 2 0.51228059795956926 0.59912765 0.394762278 0.00611006049 2 1 0
+139 2 2 0.29479453684898038 0.7446846 0.253554732 0.00176069664 2 1 0
+140 2 2 0.23636956568495673 0.789488852 0.209835038 0.000676139141 2 1 0
+141 2 2 0.28994324565081614 0.748306036 0.247889653 0.0038042888 2 1 0
+142 2 2 0.4769116280424554 0.6206974 0.378338724 0.0009639265 2 1 0
+143 2 2 0.23597765808861054 0.7897983 0.209748372 0.0004533111 2 1 0
+144 2 2 0.16769889506754851 0.8456084 0.153658465 0.000733090041 2 1 0
+145 2 2 0.27656426046073113 0.7583849 0.239901781 0.001713358 2 1 0
+146 2 2 0.5356947512214818 0.585262537 0.413683951 0.00105350767 2 1 0
+147 2 2 0.35725414522256133 0.6995947 0.298375219 0.00203008 2 1 0
+148 2 2 0.16789042737345514 0.845446467 0.152793139 0.00176038919 2 1 0
+149 2 2 0.4005334114812254 0.6699626 0.327896535 0.002140856 2 1 0
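A note for readers comparing the OVA baselines above with the PKPD baselines that follow: the training logs show OVA fitting one binary learner per class ("Training learner 0" through "Training learner 2"), while PKPD's "Training learner (i,j)" lines enumerate class pairs with j <= i, so a k-class problem trains k(k+1)/2 learners (six for three-class iris). Each learner is followed by its own "Training calibrator." step. A minimal sketch of those counts, illustrative only and not part of the patch:

    // Illustrative helpers, not part of the patch: learner counts implied by
    // the "Training learner ..." lines in the baseline logs.
    static int OvaLearnerCount(int classCount) => classCount;                          // iris: 3
    static int PkpdLearnerCount(int classCount) => classCount * (classCount + 1) / 2;  // iris: 6
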
diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt
new file mode 100644
index 0000000000..f71990cd92
--- /dev/null
+++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-out.txt
@@ -0,0 +1,70 @@
+maml.exe CV tr=PKPD{p=AvgPer { lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1
+Not adding a normalizer.
+Training learner (0,0)
+Training calibrator.
+Training learner (1,0)
+Training calibrator.
+Training learner (1,1)
+Training calibrator.
+Training learner (2,0)
+Training calibrator.
+Training learner (2,1)
+Training calibrator.
+Training learner (2,2)
+Training calibrator.
+Not training a calibrator because it is not needed.
+Not adding a normalizer.
+Training learner (0,0)
+Training calibrator.
+Training learner (1,0)
+Training calibrator.
+Training learner (1,1)
+Training calibrator.
+Training learner (2,0)
+Training calibrator.
+Training learner (2,1)
+Training calibrator.
+Training learner (2,2)
+Training calibrator.
+Not training a calibrator because it is not needed.
+
+Confusion table
+          ||========================
+PREDICTED ||     0 |     1 |     2 | Recall
+TRUTH     ||========================
+        0 ||    21 |     0 |     0 | 1.0000
+        1 ||     0 |    26 |     4 | 0.8667
+        2 ||     0 |     0 |    28 | 1.0000
+          ||========================
+Precision ||1.0000 |1.0000 |0.8750 |
+Accuracy(micro-avg): 0.949367
+Accuracy(macro-avg): 0.955556
+Log-loss: 0.343967
+Log-loss reduction: 68.371359
+
+Confusion table
+          ||========================
+PREDICTED ||     0 |     1 |     2 | Recall
+TRUTH     ||========================
+        0 ||    29 |     0 |     0 | 1.0000
+        1 ||     0 |    19 |     1 | 0.9500
+        2 ||     0 |     0 |    22 | 1.0000
+          ||========================
+Precision ||1.0000 |1.0000 |0.9565 |
+Accuracy(micro-avg): 0.985915
+Accuracy(macro-avg): 0.983333
+Log-loss: 0.277101
+Log-loss reduction: 74.475991
+
+OVERALL RESULTS
+---------------------------------------
+Accuracy(micro-avg): 0.967641 (0.0183)
+Accuracy(macro-avg): 0.969444 (0.0139)
+Log-loss: 0.310534 (0.0334)
+Log-loss reduction: 71.423675 (3.0523)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt
new file mode 100644
index 0000000000..4ee447e298
--- /dev/null
+++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris-rp.txt
@@ -0,0 +1,4 @@
+PKPD
+Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
+0.967641 0.969444 0.310534 71.42368 AvgPer{lr=0.8} PKPD %Data% %Output% 99 0 0 maml.exe CV tr=PKPD{p=AvgPer { lr=0.8 }} threads=- norm=No dout=%Output% data=%Data% seed=1 /p:AvgPer{lr=0.8}
+
diff --git a/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt
new file mode 100644
index 0000000000..100ad1843e
--- /dev/null
+++ b/test/BaselineOutput/Common/PKPD/PKPD-CV-iris.txt
@@ -0,0 +1,151 @@
+Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
+5 0 0 0.023753880830600982 0.976526 0.0234627537 1.12415537E-05 0 1 2
+6 0 0 0.1302157926188138 0.877905965 0.122053728 4.028206E-05 0 1 2
+8 0 0 0.38835109531985795 0.6781742 0.321734548 9.125436E-05 0 1 2
+9 0 0 0.25023100275372456 0.7786209 0.221342817 3.62790561E-05 0 1 2
+10 0 0 0.043651773791107358 0.957287252 0.0427056365 7.135807E-06 0 1 2
+11 0 0 0.13521420369658141 0.8735288 0.126428857 4.23762431E-05 0 1 2
+18 0 0 0.028430623462091707 0.9719697 0.02802308 7.212694E-06 0 1 2
+20 0 0 0.10458121712711159 0.900701642 0.0992766 2.17436227E-05 0 1 2
+21 0 0 0.045363134495307413 0.9556504 0.0443333276 1.62607685E-05 0 1 2
+25 0 0 0.28906012724616753 0.7489672 0.250976235 5.657253E-05 0 1 2
+28 0 0 0.10227830625980082 0.902778268 0.0972085 1.322162E-05 0 1 2
+31 0 0 0.08562861703311947 0.9179351 0.08204699 1.79389826E-05 0 1 2
+32 0 0 0.017136477098684464 0.9830095 0.0169868153 3.65674259E-06 0 1 2
+35 0 0 0.16731981579854963 0.845929 0.1540563 1.46633747E-05 0 1 2
+37 0 0 0.25023100275372456 0.7786209 0.221342817 3.62790561E-05 0 1 2
+40 0 0 0.079660704938754975 0.9234296 0.0765565857 1.37818333E-05 0 1 2
+41 0 0 0.64555225289552742 0.5243729 0.475452363 0.0001747969 0 1 2
+44 0 0 0.040687480851420016 0.960129142 0.0398315266 3.935343E-05 0 1 2
+45 0 0 0.27950273039157197 0.756159663 0.243786544 5.377483E-05 0 1 2
+46 0 0 0.040918754924550584 0.9599071 0.0400804244 1.24371918E-05 0 1 2
+48 0 0 0.046121164137562148 0.954926252 0.04506536 8.38812048E-06 0 1 2
+50 1 1 0.72887025154137375 0.482453734 0.272260427 0.245285824 1 0 2
+51 1 1 0.8088987568127185 0.445348233 0.336972356 0.2176794 1 2 0
+52 1 1 0.78136433840766417 0.457781017 0.438075066 0.104143932 1 2 0
+54 1 1 0.54403068907517893 0.5804041 0.36454007 0.0550558232 1 2 0
+56 1 2 1.1200462355861258 0.531909943 0.3262647 0.141825333 2 1 0
+60 1 1 0.23491005150903682 0.790641963 0.1784061 0.0309519637 1 2 0
+63 1 1 0.68939365760121074 0.5018803 0.458269477 0.0398502052 1 2 0
+64 1 1 0.4797019767355476 0.618967831 0.291831881 0.0892002955 1 0 2
+66 1 2 0.86155959099850343 0.5307835 0.422502637 0.04671388 2 1 0
+68 1 1 0.49420884947307447 0.61005336 0.3815175 0.008429126 1 2 0
+69 1 1 0.23145347728671642 0.7933796 0.145455226 0.061165195 1 2 0
+70 1 2 1.4851413739165595 0.736525655 0.226470321 0.03700401 2 1 0
+71 1 1 0.3561851307666371 0.700342953 0.164127111 0.13552995 1 0 2
+72 1 1 0.67173548994464938 0.5108213 0.4814403 0.007738397 1 2 0
+73 1 1 0.50612015658391962 0.602829933 0.3625389 0.034631148 1 2 0
+74 1 1 0.41484498646830853 0.66044265 0.18213135 0.157426015 1 2 0
+76 1 1 0.50271064477753691 0.6048888 0.345186323 0.0499248542 1 2 0
+77 1 2 1.0467776624316247 0.6128986 0.3510672 0.0360342041 2 1 0
+79 1 1 0.25594672318320155 0.7741832 0.183944046 0.0418727621 1 0 2
+82 1 1 0.28889429116536303 0.7490914 0.130277559 0.120631076 1 2 0
+88 1 1 0.52394753213635203 0.5921783 0.2563273 0.151494384 1 2 0
+90 1 1 0.45035315262659448 0.637403 0.3411746 0.0214223787 1 2 0
+91 1 1 0.68738585386581597 0.502889 0.425832123 0.07127889 1 2 0
+92 1 1 0.2697499931611898 0.763570368 0.162871167 0.0735584348 1 2 0
+93 1 1 0.17823538741474004 0.836745441 0.0821188241 0.08113571 1 2 0
+95 1 1 0.48391980144761892 0.616362631 0.241359085 0.1422783 1 2 0
+96 1 1 0.47873437290577431 0.619567037 0.281721354 0.09871157 1 2 0
+97 1 1 0.4309836113144338 0.649869561 0.220002741 0.130127683 1 2 0
+98 1 1 0.27001090521188814 0.76337117 0.202765256 0.0338635966 1 0 2
+99 1 1 0.40366256068064504 0.667869449 0.2403792 0.09175138 1 2 0
+100 2 2 0.098423959135663375 0.9062646 0.09183126 0.001904126 2 1 0
+102 2 2 0.18784362467414789 0.8287443 0.16809994 0.00315576326 2 1 0
+104 2 2 0.20305260298998334 0.8162353 0.181571469 0.00219323137 2 1 0
+105 2 2 0.16738689651485589 0.8458723 0.153207168 0.000920576451 2 1 0
+106 2 2 0.58800293393782621 0.5554354 0.439967334 0.00459726434 2 1 0
+108 2 2 0.4731297912724482 0.6230492 0.37598455 0.0009662311 2 1 0
+109 2 2 0.035500468049352432 0.9651223 0.0299819969 0.00489571551 2 1 0
+111 2 2 0.39672088491078095 0.6725217 0.3237172 0.00376107777 2 1 0
+112 2 2 0.20709995070378101 0.8129384 0.180399537 0.00666203769 2 1 0
+113 2 2 0.49689364308238115 0.6084177 0.3893122 0.002270126 2 1 0
+115 2 2 0.12645623913049378 0.8812127 0.109346546 0.009440767 2 1 0
+117 2 2 0.025087766630295084 0.9752243 0.02114348 0.00363221765 2 1 0
+120 2 2 0.11093517828288008 0.894996762 0.09939285 0.005610376 2 1 0
+121 2 2 0.37719650665097176 0.6857813 0.3084725 0.00574616855 2 1 0
+122 2 2 0.25512580405327051 0.774819 0.224642664 0.0005383256 2 1 0
+123 2 2 0.53470400837092702 0.585842669 0.4030093 0.0111480216 2 1 0
+125 2 2 0.15798263383297981 0.8538646 0.139957428 0.00617795158 2 1 0
+128 2 2 0.30543205932131445 0.736804962 0.2611207 0.00207435014 2 1 0
+129 2 2 0.31304921188329871 0.7312139 0.2608706 0.00791547 2 1 0
+131 2 2 0.050064073835890829 0.9511685 0.03442935 0.01440218 2 1 0
+132 2 2 0.29000752749814246 0.748257935 0.249804661 0.00193742709 2 1 0
+133 2 2 0.57582423964286222 0.562241256 0.4260323 0.0117264669 2 1 0
+137 2 2 0.23369102254431959 0.791606367 0.2005785 0.007815139 2 1 0
+138 2 2 0.38238249988924056 0.682234049 0.29368493 0.0240810253 2 1 0
+141 2 2 0.18873995406873867 0.8280018 0.150254741 0.0217434336 2 1 0
+144 2 2 0.080216932325286969 0.9229161 0.0723539 0.00473001366 2 1 0
+145 2 2 0.18503665462933813 0.8310738 0.158534229 0.0103919385 2 1 0
+147 2 2 0.25370059016834895 0.7759241 0.212361827 0.0117140962 2 1 0
+0 0 0 0.10211748577393479 0.902923465 0.09707638 1.31251113E-07 0 1 2
+1 0 0 0.18414005419872645 0.8318193 0.1681801 6.10566246E-07 0 1 2
+2 0 0 0.14492385261258312 0.865088165 0.134911478 3.69817315E-07 0 1 2
+3 0 0 0.18018182361642154 0.835118353 0.164880455 1.1808728E-06 0 1 2
+4 0 0 0.091854748075686457 0.912237644 0.08776226 1.22251592E-07 0 1 2
+7 0 0 0.12233785238606552 0.88484937 0.115150325 2.89829131E-07 0 1 2
+12 0 0 0.18838937109230286 0.828292131 0.1717073 5.786816E-07 0 1 2
+13 0 0 0.17483938525704146 0.839591861 0.1604077 4.46853E-07 0 1 2
+14 0 0 0.045135070620344352 0.955868363 0.0441316478 5.3407363E-09 0 1 2
+15 0 0 0.030612994376980014 0.969850838 0.0301491246 1.0891493E-08 0 1 2
+16 0 0 0.055112338614887763 0.9463788 0.05362115 3.08450865E-08 0 1 2
+17 0 0 0.10062993461057677 0.9042676 0.09573224 1.63700875E-07 0 1 2
+19 0 0 0.073413324188812371 0.9292167 0.07078323 1.13846653E-07 0 1 2
+22 0 0 0.080101141800871592 0.923023 0.07697697 5.50810348E-08 0 1 2
+23 0 0 0.14576182189759379 0.864363551 0.135635108 1.32143555E-06 0 1 2
+24 0 0 0.15527358603878322 0.8561809 0.143817171 1.903017E-06 0 1 2
+26 0 0 0.12564253084337382 0.881930053 0.118069261 6.645889E-07 0 1 2
+27 0 0 0.10587439997529158 0.8995376 0.100462213 1.60874819E-07 0 1 2
+29 0 0 0.16779715917645741 0.8455253 0.154473513 1.14837974E-06 0 1 2
+30 0 0 0.18480739195523371 0.8312644 0.168734416 1.2252525E-06 0 1 2
+33 0 0 0.04032543480505283 0.9604768 0.03952316 1.14692389E-08 0 1 2
+34 0 0 0.17552640171742001 0.839015245 0.160984188 5.622917E-07 0 1 2
+36 0 0 0.09204810470455442 0.9120613 0.0879386961 4.5151662E-08 0 1 2
+38 0 0 0.18661279637066258 0.829764962 0.170234054 9.896429E-07 0 1 2
+39 0 0 0.12071723979290006 0.88628453 0.113715246 2.43832517E-07 0 1 2
+42 0 0 0.15054831358788412 0.860236168 0.139763221 6.199213E-07 0 1 2
+43 0 0 0.10889597554551136 0.8968237 0.103175469 8.1666E-07 0 1 2
+47 0 0 0.15393456680529674 0.8573281 0.142671227 6.39328334E-07 0 1 2
+49 0 0 0.13066902060601812 0.877508163 0.122491583 2.52864339E-07 0 1 2
+53 1 1 0.2827592626915541 0.7537012 0.21389237 0.03240643 1 2 0
+55 1 1 0.40583815374571869 0.666418 0.308232874 0.0253491253 1 2 0
+57 1 1 0.22836674855401806 0.795832336 0.173427463 0.0307402182 1 0 2
+58 1 1 0.17705481273015428 0.837733865 0.126026779 0.0362393446 1 2 0
+59 1 1 0.28623948897938623 0.7510827 0.179181576 0.06973568 1 2 0
+61 1 1 0.23046176000020194 0.7941668 0.132671311 0.07316189 1 2 0
+62 1 1 0.13173560241334381 0.8765727 0.08075007 0.04267718 1 2 0
+65 1 1 0.14168260820038769 0.8678967 0.07845409 0.05364923 1 0 2
+67 1 1 0.15281502745602504 0.858288467 0.07580672 0.06590483 1 0 2
+75 1 1 0.14692167558386063 0.8633616 0.0731101856 0.06352824 1 2 0
+78 1 1 0.36772458144318781 0.69230783 0.278533429 0.0291587356 1 2 0
+80 1 1 0.16590807826044235 0.8471241 0.07986628 0.0730095953 1 2 0
+81 1 1 0.15418537033222618 0.8571131 0.09555078 0.0473361239 1 0 2
+83 1 2 0.87913156238340517 0.5822821 0.415143281 0.00257462286 2 1 0
+84 1 1 0.60329559653785991 0.547005951 0.428067416 0.0249266215 1 2 0
+85 1 1 0.30763923553748801 0.7351805 0.189976588 0.07484292 1 2 0
+86 1 1 0.22077802043432937 0.801894665 0.161515683 0.0365896225 1 2 0
+87 1 1 0.26521281231157245 0.7670427 0.216186985 0.0167703368 1 2 0
+89 1 1 0.24918952543906914 0.779432237 0.173652634 0.0469151475 1 2 0
+94 1 1 0.27882348553441511 0.756673455 0.19962126 0.04370527 1 2 0
+101 2 2 0.48364964736145688 0.616529167 0.3817476 0.00172325119 2 1 0
+103 2 2 0.46298590319892402 0.6294015 0.369625777 0.000972708163 2 1 0
+107 2 2 0.47875168972271287 0.6195563 0.380170017 0.000273669633 2 1 0
+110 2 2 0.50353121321209937 0.604392648 0.38902545 0.00658191368 2 1 0
+114 2 2 0.42627427461954132 0.652937233 0.345770031 0.00129272381 2 1 0
+116 2 2 0.46330187402047324 0.629202664 0.369202226 0.00159509375 2 1 0
+118 2 2 0.54017556235004138 0.582645953 0.4173134 4.062326E-05 2 1 0
+119 2 2 0.59238374503353441 0.5530075 0.445467323 0.00152520277 2 1 0
+124 2 2 0.37578192391690202 0.6867521 0.311931521 0.0013163907 2 1 0
+126 2 2 0.65538868877846068 0.5192402 0.4731488 0.007610987 2 1 0
+127 2 2 0.58488203770280534 0.5571716 0.4347405 0.008087933 2 1 0
+130 2 2 0.48483716808745803 0.61579746 0.383864969 0.0003375506 2 1 0
+134 2 2 0.53564556246675443 0.5852913 0.4138755 0.0008331971 2 1 0
+135 2 2 0.4222335993197745 0.6555809 0.344069272 0.0003498588 2 1 0
+136 2 2 0.3225183821825246 0.7243226 0.2744004 0.00127698807 2 1 0
+139 2 2 0.45570650730743484 0.6339999 0.3636862 0.002313912 2 1 0
+140 2 2 0.38195300929629561 0.6825271 0.316598237 0.00087465957 2 1 0
+142 2 2 0.48364964736145688 0.616529167 0.3817476 0.00172325119 2 1 0
+143 2 2 0.37769091414905154 0.6854423 0.313964784 0.0005928655 2 1 0
+146 2 2 0.54054677541576512 0.5824297 0.415693641 0.00187665562 2 1 0
+148 2 2 0.33311820419531579 0.7166855 0.281080544 0.00223395228 2 1 0
+149 2 2 0.47519601475299389 0.62176317 0.374690771 0.00354602537 2 1 0
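For reference when reading the summary metrics in the -out baselines: the log-loss reduction figures are consistent with the percentage improvement of the model's log-loss over the log-loss of always predicting the class priors, which for the balanced three-class iris set is ln 3. A quick illustrative check against the TrainTest baseline below, under that assumed definition (not part of the patch):

    // Assumed definition: reduction = 100 * (priorLogLoss - logLoss) / priorLogLoss.
    // Iris is balanced (50 instances per class), so the prior log-loss is ln(3).
    double priorLogLoss = Math.Log(3.0);   // ~1.098612
    double logLoss = 0.293938;             // Log-loss from PKPD-TrainTest-iris-out.txt
    double reduction = 100.0 * (priorLogLoss - logLoss) / priorLogLoss;
    Console.WriteLine(reduction);          // ~73.2446, matching "Log-loss reduction: 73.244600"
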
diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt
new file mode 100644
index 0000000000..c319ba3d49
--- /dev/null
+++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-out.txt
@@ -0,0 +1,42 @@
+maml.exe TrainTest test=%Data% tr=PKPD{p=AvgPer { lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1
+Not adding a normalizer.
+Training learner (0,0)
+Training calibrator.
+Training learner (1,0)
+Training calibrator.
+Training learner (1,1)
+Training calibrator.
+Training learner (2,0)
+Training calibrator.
+Training learner (2,1)
+Training calibrator.
+Training learner (2,2)
+Training calibrator.
+Not training a calibrator because it is not needed.
+
+Confusion table
+          ||========================
+PREDICTED ||     0 |     1 |     2 | Recall
+TRUTH     ||========================
+        0 ||    50 |     0 |     0 | 1.0000
+        1 ||     0 |    45 |     5 | 0.9000
+        2 ||     0 |     1 |    49 | 0.9800
+          ||========================
+Precision ||1.0000 |0.9783 |0.9074 |
+Accuracy(micro-avg): 0.960000
+Accuracy(macro-avg): 0.960000
+Log-loss: 0.293938
+Log-loss reduction: 73.244600
+
+OVERALL RESULTS
+---------------------------------------
+Accuracy(micro-avg): 0.960000 (0.0000)
+Accuracy(macro-avg): 0.960000 (0.0000)
+Log-loss: 0.293938 (0.0000)
+Log-loss reduction: 73.244600 (0.0000)
+
+---------------------------------------
+Physical memory usage(MB): %Number%
+Virtual memory usage(MB): %Number%
+%DateTime% Time elapsed(s): %Number%
+
diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt
new file mode 100644
index 0000000000..b8bbf72cd9
--- /dev/null
+++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris-rp.txt
@@ -0,0 +1,4 @@
+PKPD
+Accuracy(micro-avg) Accuracy(macro-avg) Log-loss Log-loss reduction /p Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings
+0.96 0.96 0.293938 73.2446 AvgPer{lr=0.8} PKPD %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=PKPD{p=AvgPer { lr=0.8 }} norm=No dout=%Output% data=%Data% out=%Output% seed=1 /p:AvgPer{lr=0.8}
+
diff --git a/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt
new file mode 100644
index 0000000000..abaafba303
--- /dev/null
+++ b/test/BaselineOutput/Common/PKPD/PKPD-TrainTest-iris.txt
@@ -0,0 +1,151 @@
+Instance Label Assigned Log-loss #1 Score #2 Score #3 Score #1 Class #2 Class #3 Class
+0 0 0 0.10709083790126951 0.898444057 0.101555951 1.09862652E-08 0 1 2
+1 0 0 0.14972937943085041 0.860940933 0.139058977 1.03412368E-07 0 1 2
+2 0 0 0.13145025814375275 0.8768229 0.123177059 6.145917E-08 0 1 2
+3 0 0 0.15528019966440448 0.856175244 0.143824473 2.62822567E-07 0 1 2
+4 0 0 0.10252830274490825 0.9025526 0.09744736 1.04376809E-08 0 1 2
+5 0 0 0.090053147743181336 0.9138826 0.08611736 8.101242E-09 0 1 2
+6 0 0 0.12357314644074496 0.883757 0.116242908 8.74405E-08 0 1 2
+7 0 0 0.12105140488742108 0.8859884 0.114011578 3.08569952E-08 0 1 2
+8 0 0 0.17274984965939472 0.841348052 0.158651352 5.97416147E-07 0 1 2
+9 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2
+10 0 0 0.093967949932628358 0.910311937 0.08968807 3.63372E-09 0 1 2
+11 0 0 0.13108575869963462 0.877142549 0.122857377 8.249825E-08 0 1 2
+12 0 0 0.15257914743793716 0.858490944 0.141508967 9.911802E-08 0 1 2
+13 0 0 0.14535615625152426 0.864714265 0.1352856 1.173372E-07 0 1 2
+14 0 0 0.06340435568818982 0.9385639 0.0614361 1.239382E-10 0 1 2
+15 0 0 0.056683708917569194 0.9448929 0.0551071428 2.807663E-10 0 1 2
+16 0 0 0.075170002932813201 0.9275858 0.07241419 1.54777513E-09 0 1 2
+17 0 0 0.10666806288443777 0.898824 0.101176038 1.5170011E-08 0 1 2
+18 0 0 0.091241664716928128 0.9127971 0.0872029141 3.50579832E-09 0 1 2
+19 0 0 0.092742902606664304 0.9114278 0.08857219 8.39613445E-09 0 1 2
+20 0 0 0.12374770832324052 0.883602738 0.116397209 2.285497E-08 0 1 2
+21 0 0 0.098256721071890374 0.9064162 0.093583785 1.62108957E-08 0 1 2
+22 0 0 0.092301570249172765 0.9118301 0.08816987 6.23606855E-09 0 1 2
+23 0 0 0.1380404335556189 0.8710635 0.12893635 2.00292092E-07 0 1 2
+24 0 0 0.15120025555722924 0.8596755 0.1403242 2.922401E-07 0 1 2
+25 0 0 0.16166208373075255 0.850728631 0.14927116 1.80764E-07 0 1 2
+26 0 0 0.12599234088657019 0.8816216 0.118378319 8.963828E-08 0 1 2
+27 0 0 0.11003318759164467 0.8958044 0.104195595 1.25205668E-08 0 1 2
+28 0 0 0.11184598225922286 0.894181967 0.105818 1.1564893E-08 0 1 2
+29 0 0 0.1505769302299601 0.860211551 0.139788255 2.15031562E-07 0 1 2
+30 0 0 0.15733441851850752 0.8544183 0.145581514 2.27139253E-07 0 1 2
+31 0 0 0.11219313224767015 0.8938716 0.1061284 1.89737328E-08 0 1 2
+32 0 0 0.076296562873109264 0.9265414 0.0734586 1.21595978E-09 0 1 2
+33 0 0 0.06454049206643403 0.937498152 0.06250186 3.385217E-10 0 1 2
+34 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2
+35 0 0 0.11957675391026261 0.8872959 0.112704061 1.73822077E-08 0 1 2
+36 0 0 0.095551393128538914 0.908871651 0.09112834 2.340219E-09 0 1 2
+37 0 0 0.14791348202123955 0.862505734 0.137494177 8.105486E-08 0 1 2
+38 0 0 0.15557611714722408 0.8559219 0.144077763 2.80571015E-07 0 1 2
+39 0 0 0.11898397051842637 0.887822032 0.112177923 2.32430128E-08 0 1 2
+40 0 0 0.10383152796029192 0.901377141 0.0986228138 1.3316324E-08 0 1 2
+41 0 0 0.23267843022808721 0.792408347 0.2075885 3.16824639E-06 0 1 2
+42 0 0 0.13825641388336071 0.870875359 0.129124492 1.43657843E-07 0 1 2
+43 0 0 0.11780844245896904 0.8888663 0.11113359 1.22518585E-07 0 1 2
+44 0 0 0.11106878201188827 0.8948772 0.105122752 6.136807E-08 0 1 2
+45 0 0 0.1519822314490463 0.859003544 0.140996248 1.90226359E-07 0 1 2
+46 0 0 0.097403275899291555 0.9071901 0.09280992 9.201566E-09 0 1 2
+47 0 0 0.13975224176627257 0.869573653 0.130426213 1.23595186E-07 0 1 2
+48 0 0 0.095640521579255233 0.908790648 0.09120932 4.824783E-09 0 1 2
+49 0 0 0.12297630490939666 0.8842846 0.115715332 2.84802919E-08 0 1 2
+50 1 1 0.072140726875920644 0.930399954 0.05064142 0.018958617 1 0 2
+51 1 1 0.12052363925502865 0.886456132 0.0577918626 0.0557519831 1 2 0
+52 1 1 0.11754125629409608 0.88910383 0.0897584558 0.0211377256 1 2 0
+53 1 1 0.55234194460144326 0.5756002 0.407715857 0.01668393 1 2 0
+54 1 1 0.22033072960732095 0.8022534 0.179187492 0.018559115 1 2 0
+55 1 1 0.48471715245043789 0.61587137 0.369235158 0.0148934675 1 2 0
+56 1 1 0.19649325323392461 0.8216069 0.144622579 0.03377054 1 2 0
+57 1 1 0.23907513650017656 0.7873557 0.144207269 0.06843699 1 0 2
+58 1 1 0.10057641320176978 0.904316 0.06448696 0.03119705 1 2 0
+59 1 1 0.4382277992649205 0.6451788 0.3130982 0.0417229943 1 2 0
+60 1 1 0.40378849464830396 0.667785347 0.294686764 0.0375279263 1 2 0
+61 1 1 0.19328713066283934 0.8242453 0.117469594 0.058285147 1 2 0
+62 1 1 0.14347483184211016 0.8663426 0.101038948 0.0326184332 1 2 0
+63 1 1 0.39254042807911971 0.675339043 0.3112865 0.013374473 1 2 0
+64 1 1 0.24625037004233435 0.7817265 0.199262485 0.01901104 1 0 2
+65 1 1 0.096962049648401036 0.907590449 0.07445438 0.0179551709 1 0 2
+66 1 1 0.58127624794437671 0.559184253 0.424495757 0.01631998 1 2 0
+67 1 1 0.12717766992503393 0.8805772 0.06211802 0.0573047921 1 0 2
+68 1 2 0.76418847827461567 0.5308523 0.4657117 0.003435955 2 1 0
+69 1 1 0.17854309398479379 0.836488 0.108136833 0.05537514 1 2 0
+70 1 2 0.7782997543015937 0.532311857 0.459186077 0.008502028 2 1 0
+71 1 1 0.12963404144590399 0.878416836 0.08763818 0.0339449868 1 0 2
+72 1 2 0.82527364026390571 0.5597655 0.4381151 0.00211936235 2 1 0
+73 1 1 0.31236741131512197 0.731712639 0.2543256 0.0139617641 1 2 0
+74 1 1 0.10091901291460656 0.904006243 0.0600393079 0.03595447 1 0 2
+75 1 1 0.094227600597530603 0.9100756 0.0583470054 0.0315773822 1 0 2
+76 1 1 0.16139006589793164 0.8509601 0.13421455 0.0148253879 1 2 0
+77 1 1 0.42271312925322674 0.6552666 0.336988866 0.00774457539 1 2 0
+78 1 1 0.36102952185045717 0.6969584 0.28298 0.0200616084 1 2 0
+79 1 1 0.22191911287049176 0.800980151 0.189535379 0.009484478 1 0 2
+80 1 1 0.20087047707276379 0.8180184 0.127419531 0.05456211 1 2 0
+81 1 1 0.15758181984651309 0.8542069 0.07838817 0.0674049258 1 0 2
+82 1 1 0.14551884398560022 0.8645736 0.08538146 0.0500449426 1 0 2
+83 1 2 1.0041035759831962 0.6325462 0.3663729 0.00108090381 2 1 0
+84 1 2 0.75476486164367673 0.517363548 0.470121145 0.0125153111 2 1 0
+85 1 1 0.1936571586064853 0.823940337 0.11649999 0.05955967 1 2 0
+86 1 1 0.1117176732744818 0.8942967 0.07367582 0.032027483 1 2 0
+87 1 1 0.35151004083460119 0.7036248 0.285515 0.0108601972 1 2 0
+88 1 1 0.18955293586722866 0.8273289 0.09654 0.0761311054 1 2 0
+89 1 1 0.38389360242414333 0.6812039 0.289862335 0.0289337616 1 2 0
+90 1 1 0.62618827467785798 0.534625769 0.454234719 0.0111395335 1 2 0
+91 1 1 0.25387959125212917 0.7757852 0.200217441 0.02399734 1 2 0
+92 1 1 0.16226915412183221 0.850212336 0.09567342 0.0541142225 1 2 0
+93 1 1 0.22196815331677711 0.8009409 0.127181739 0.07187738 1 0 2
+94 1 1 0.35124306791543597 0.703812659 0.268056452 0.02813091 1 2 0
+95 1 1 0.16237179398556273 0.8501251 0.08078788 0.06908702 1 2 0
+96 1 1 0.20996992406982243 0.8106086 0.139289573 0.05010183 1 2 0
+97 1 1 0.12058893069745849 0.886398256 0.0598800667 0.05372166 1 2 0
+98 1 1 0.37956731249976106 0.6841574 0.304465353 0.01137725 1 0 2
+99 1 1 0.20277814400339583 0.816459358 0.130446717 0.0530939251 1 2 0
+100 2 2 0.39358357371715191 0.674634933 0.32519117 0.00017387759 2 1 0
+101 2 2 0.42308350591214072 0.655023932 0.344296634 0.0006794688 2 1 0
+102 2 2 0.41936704076824455 0.657462835 0.342273951 0.00026323882 2 1 0
+103 2 2 0.42921904997620342 0.6510173 0.348579019 0.000403696467 2 1 0
+104 2 2 0.41208274684418961 0.6622695 0.337504864 0.00022567909 2 1 0
+105 2 2 0.44160572409172411 0.6430031 0.3569401 5.67683346E-05 2 1 0
+106 2 2 0.42934476448549436 0.6509355 0.347463518 0.00160101533 2 1 0
+107 2 2 0.45168489764494285 0.6365547 0.363331944 0.000113324531 2 1 0
+108 2 2 0.46645653711885576 0.627220869 0.372643322 0.000135785289 2 1 0
+109 2 2 0.35953291146180011 0.6980023 0.301614881 0.000382813538 2 1 0
+110 2 2 0.60830855615290891 0.5442707 0.451300323 0.004428956 2 1 0
+111 2 2 0.43425567366017515 0.6477466 0.35168618 0.0005671874 2 1 0
+112 2 2 0.42224814643798159 0.655571342 0.343755931 0.000672725844 2 1 0
+113 2 2 0.43378520195992787 0.648051441 0.351433426 0.000515127 2 1 0
+114 2 2 0.39828542770724013 0.671470344 0.328003943 0.00052572944 2 1 0
+115 2 2 0.38044631650760474 0.683556259 0.3153522 0.00109154719 2 1 0
+116 2 2 0.44214870680472357 0.642654061 0.3565694 0.0007765472 2 1 0
+117 2 2 0.39204321311235024 0.6756749 0.3240996 0.000225500859 2 1 0
+118 2 2 0.49534872735975943 0.6093584 0.390629977 1.165714E-05 2 1 0
+119 2 2 0.47792706875518332 0.6200674 0.379401267 0.0005313261 2 1 0
+120 2 2 0.38996755329866611 0.677078843 0.322423726 0.0004974509 2 1 0
+121 2 2 0.40418317857084346 0.667521834 0.331344754 0.00113342493 2 1 0
+122 2 2 0.46674185713969735 0.627041936 0.372923285 3.47993519E-05 2 1 0
+123 2 2 0.50860202326236292 0.601335645 0.396473944 0.00219042762 2 1 0
+124 2 2 0.40115690949067428 0.669545 0.3297638 0.0006912321 2 1 0
+125 2 2 0.47796417410708669 0.6200444 0.379430741 0.000524864765 2 1 0
+126 2 2 0.55674263926678424 0.573072731 0.422982275 0.00394499162 2 1 0
+127 2 2 0.53745603926068541 0.5842326 0.4114353 0.004332072 2 1 0
+128 2 2 0.42435197720848 0.6541936 0.3455436 0.000262821937 2 1 0
+129 2 2 0.59539649422184571 0.5513439 0.447734416 0.000921661733 2 1 0
+130 2 2 0.45136920579127138 0.6367557 0.363094628 0.000149650616 2 1 0
+131 2 1 0.87799286296834833 0.58302784 0.415616274 0.00135587773 1 2 0
+132 2 2 0.4217459387360496 0.655900657 0.343858719 0.0002406203 2 1 0
+133 2 2 0.58858419496856473 0.55511266 0.442362428 0.00252491026 2 1 0
+134 2 2 0.46954434981246995 0.6252871 0.3744188 0.000294059661 2 1 0
+135 2 2 0.41926206364690544 0.657531857 0.3422878 0.000180329866 2 1 0
+136 2 2 0.35983106354881655 0.6977942 0.301589876 0.000615945552 2 1 0
+137 2 2 0.43598868576509842 0.646625042 0.3524654 0.000909582246 2 1 0
+138 2 2 0.55146544914408413 0.576104939 0.418423772 0.00547132 2 1 0
+139 2 2 0.47595285901721079 0.62129277 0.377271771 0.00143543689 2 1 0
+140 2 2 0.38358127985275575 0.6814167 0.318166882 0.000416446419 2 1 0
+141 2 2 0.55824128131859518 0.572214544 0.4245338 0.00325164315 2 1 0
+142 2 2 0.42308350591214072 0.655023932 0.344296634 0.0006794688 2 1 0
+143 2 2 0.39101453828348787 0.6763703 0.32335642 0.000273291429 2 1 0
+144 2 2 0.36601283135925683 0.6934939 0.306069136 0.0004369896 2 1 0
+145 2 2 0.41354127957626624 0.661304235 0.337492228 0.00120350742 2 1 0
+146 2 2 0.44489957920965528 0.640888631 0.358320177 0.000791185652 2 1 0
+147 2 2 0.45420191711312169 0.6349545 0.363470852 0.00157464272 2 1 0
+148 2 2 0.36235400852316502 0.6960359 0.302856565 0.00110750983 2 1 0
+149 2 2 0.42624816689431871 0.6529543 0.3454746 0.00157109811 2 1 0
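In the per-instance baselines above, the Log-loss column is consistent with the negative natural log of the probability the model assigned to the instance's true label (the #1 Score when the row is correctly classified, the score at the true label's position otherwise). An illustrative check against instance 0 of PKPD-TrainTest-iris.txt, not part of the patch:

    // Instance 0: Label 0, Assigned 0, #1 Score 0.898444057, Log-loss 0.10709083790126951.
    double trueClassProbability = 0.898444057;
    double instanceLogLoss = -Math.Log(trueClassProbability);
    Console.WriteLine(instanceLogLoss);    // ~0.10709084, matching the baseline row
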
diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
index 4342fcc49f..a08795ca4c 100644
--- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
+++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
@@ -233,6 +233,17 @@ public void MultiClassCVTest()
             Done();
         }
 
+        [Fact]
+        [TestCategory("Multiclass")]
+        public void MulticlassReductionTest()
+        {
+            RunOneAllTests(TestLearners.Ova, TestDatasets.iris);
+            RunOneAllTests(TestLearners.OvaWithFastForest, TestDatasets.iris);
+            RunOneAllTests(TestLearners.Pkpd, TestDatasets.iris);
+
+            Done();
+        }
+
         [Fact(Skip = "Need CoreTLC specific baseline update")]
         [TestCategory("Clustering")]
         [TestCategory("KMeans")]
diff --git a/test/Microsoft.ML.TestFramework/Learners.cs b/test/Microsoft.ML.TestFramework/Learners.cs
index f7062c3c4a..fd0e7d6aaa 100644
--- a/test/Microsoft.ML.TestFramework/Learners.cs
+++ b/test/Microsoft.ML.TestFramework/Learners.cs
@@ -130,6 +130,25 @@ static TestLearnersBase()
             Tag = "AveragedPerceptron-Reg"
         };
 
+        public static PredictorAndArgs Ova = new PredictorAndArgs
+        {
+            Trainer = new SubComponent("OVA", "p=AvgPer{ lr=0.8 }"),
+            MamlArgs = new[] { "norm=no" },
+        };
+
+        public static PredictorAndArgs OvaWithFastForest = new PredictorAndArgs
+        {
+            Trainer = new SubComponent("OVA", "p=FastForest{ }"),
+            MamlArgs = new[] { "norm=no" },
+            Tag = "OVA-FastForest",
+        };
+
+        public static PredictorAndArgs Pkpd = new PredictorAndArgs
+        {
+            Trainer = new SubComponent("PKPD", "p=AvgPer { lr=0.8 }"),
+            MamlArgs = new[] { "norm=no" },
+        };
+
         // Old.
         public static PredictorAndArgs perceptronDefault = new PredictorAndArgs
         {
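One detail of the Learners.cs entries above worth noting: when Tag is omitted, the baseline files appear to be named after the trainer itself (OVA-*, PKPD-*), while the explicit Tag = "OVA-FastForest" on OvaWithFastForest matches the OVA-FastForest-* baseline files. Under that assumption, a FastForest-backed PKPD variant would look like the following hypothetical entry (not part of this patch):

    // Hypothetical entry, not part of the patch: a tagged PKPD variant that,
    // assuming Tag drives baseline file naming, would write PKPD-FastForest-* baselines.
    public static PredictorAndArgs PkpdWithFastForest = new PredictorAndArgs
    {
        Trainer = new SubComponent("PKPD", "p=FastForest{ }"),
        MamlArgs = new[] { "norm=no" },
        Tag = "PKPD-FastForest",
    };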